From 66ef31882341852229c74996867916fbd4a2fe2a Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 9 Sep 2024 13:22:16 +0200 Subject: [PATCH 001/762] project panel: select autofolded entries (#17520) Closes #17252 Release Notes: - Intermediate auto-folded project entries can now be selected and acted upon (removed, renamed, cut, pasted). --- crates/project_panel/src/project_panel.rs | 238 +++++++++++++++--- .../src/components/label/highlighted_label.rs | 5 + crates/ui/src/components/label/label.rs | 5 + crates/ui/src/components/label/label_like.rs | 22 +- 4 files changed, 238 insertions(+), 32 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 9d73557761343a..be945cde773bbc 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -60,11 +60,15 @@ pub struct ProjectPanel { scroll_handle: UniformListScrollHandle, focus_handle: FocusHandle, visible_entries: Vec<(WorktreeId, Vec, OnceCell>>)>, + /// Maps from leaf project entry ID to the currently selected ancestor. + /// Relevant only for auto-fold dirs, where a single project panel entry may actually consist of several + /// project entries (and all non-leaf nodes are guaranteed to be directories). 
+ ancestors: HashMap, last_worktree_root_id: Option, last_external_paths_drag_over_entry: Option, expanded_dir_ids: HashMap>, unfolded_dir_ids: HashSet, - // Currently selected entry in a file tree + // Currently selected leaf entry (see auto-folding for a definition of that) in a file tree selection: Option, marked_entries: BTreeSet, context_menu: Option<(View, Point, Subscription)>, @@ -96,7 +100,7 @@ enum ClipboardEntry { } #[derive(Debug, PartialEq, Eq, Clone)] -pub struct EntryDetails { +struct EntryDetails { filename: String, icon: Option, path: Arc, @@ -111,18 +115,19 @@ pub struct EntryDetails { is_cut: bool, git_status: Option, is_private: bool, + is_auto_folded: bool, worktree_id: WorktreeId, canonical_path: Option>, } #[derive(PartialEq, Clone, Default, Debug, Deserialize)] -pub struct Delete { +struct Delete { #[serde(default)] pub skip_prompt: bool, } #[derive(PartialEq, Clone, Default, Debug, Deserialize)] -pub struct Trash { +struct Trash { #[serde(default)] pub skip_prompt: bool, } @@ -155,6 +160,18 @@ actions!( ] ); +#[derive(Debug, Default)] +struct FoldedAncestors { + current_ancestor_depth: usize, + ancestors: Vec, +} + +impl FoldedAncestors { + fn max_ancestor_depth(&self) -> usize { + self.ancestors.len() + } +} + pub fn init_settings(cx: &mut AppContext) { ProjectPanelSettings::register(cx); } @@ -277,6 +294,7 @@ impl ProjectPanel { scroll_handle: UniformListScrollHandle::new(), focus_handle, visible_entries: Default::default(), + ancestors: Default::default(), last_worktree_root_id: Default::default(), last_external_paths_drag_over_entry: None, expanded_dir_ids: Default::default(), @@ -457,7 +475,7 @@ impl ProjectPanel { entry_id, }); - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { let auto_fold_dirs = ProjectPanelSettings::get_global(cx).auto_fold_dirs; let is_root = Some(entry) == worktree.root_entry(); let is_dir = entry.is_dir(); @@ -583,6 +601,13 @@ impl 
ProjectPanel { fn expand_selected_entry(&mut self, _: &ExpandSelectedEntry, cx: &mut ViewContext) { if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) { + if folded_ancestors.current_ancestor_depth > 0 { + folded_ancestors.current_ancestor_depth -= 1; + cx.notify(); + return; + } + } if entry.is_dir() { let worktree_id = worktree.id(); let entry_id = entry.id; @@ -611,6 +636,13 @@ impl ProjectPanel { fn collapse_selected_entry(&mut self, _: &CollapseSelectedEntry, cx: &mut ViewContext) { if let Some((worktree, mut entry)) = self.selected_entry(cx) { + if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) { + if folded_ancestors.current_ancestor_depth < folded_ancestors.max_ancestor_depth() { + folded_ancestors.current_ancestor_depth += 1; + cx.notify(); + return; + } + } let worktree_id = worktree.id(); let expanded_dir_ids = if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) { @@ -943,6 +975,17 @@ impl ProjectPanel { } } + fn unflatten_entry_id(&self, leaf_entry_id: ProjectEntryId) -> ProjectEntryId { + if let Some(ancestors) = self.ancestors.get(&leaf_entry_id) { + ancestors + .ancestors + .get(ancestors.current_ancestor_depth) + .copied() + .unwrap_or(leaf_entry_id) + } else { + leaf_entry_id + } + } fn rename(&mut self, _: &Rename, cx: &mut ViewContext) { if let Some(SelectedEntry { worktree_id, @@ -950,6 +993,7 @@ impl ProjectPanel { }) = self.selection { if let Some(worktree) = self.project.read(cx).worktree_for_id(worktree_id, cx) { + let entry_id = self.unflatten_entry_id(entry_id); if let Some(entry) = worktree.read(cx).entry_for_id(entry_id) { self.edit_state = Some(EditState { worktree_id, @@ -1161,7 +1205,7 @@ impl ProjectPanel { } fn select_parent(&mut self, _: &SelectParent, cx: &mut ViewContext) { - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { if let 
Some(parent) = entry.path.parent() { if let Some(parent_entry) = worktree.entry_for_path(parent) { self.selection = Some(SelectedEntry { @@ -1447,13 +1491,13 @@ impl ProjectPanel { } fn reveal_in_finder(&mut self, _: &RevealInFileManager, cx: &mut ViewContext) { - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { cx.reveal_path(&worktree.abs_path().join(&entry.path)); } } fn open_in_terminal(&mut self, _: &OpenInTerminal, cx: &mut ViewContext) { - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { let abs_path = worktree.abs_path().join(&entry.path); let working_directory = if entry.is_dir() { Some(abs_path) @@ -1476,7 +1520,7 @@ impl ProjectPanel { _: &NewSearchInDirectory, cx: &mut ViewContext, ) { - if let Some((worktree, entry)) = self.selected_entry(cx) { + if let Some((worktree, entry)) = self.selected_sub_entry(cx) { if entry.is_dir() { let include_root = self.project.read(cx).visible_worktrees(cx).count() > 1; let dir_path = if include_root { @@ -1596,15 +1640,36 @@ impl ProjectPanel { // Returns list of entries that should be affected by an operation. // When currently selected entry is not marked, it's treated as the only marked entry. 
fn marked_entries(&self) -> BTreeSet { - let Some(selection) = self.selection else { + let Some(mut selection) = self.selection else { return Default::default(); }; if self.marked_entries.contains(&selection) { - self.marked_entries.clone() + self.marked_entries + .iter() + .copied() + .map(|mut entry| { + entry.entry_id = self.resolve_entry(entry.entry_id); + entry + }) + .collect() } else { + selection.entry_id = self.resolve_entry(selection.entry_id); BTreeSet::from_iter([selection]) } } + + fn resolve_entry(&self, id: ProjectEntryId) -> ProjectEntryId { + self.ancestors + .get(&id) + .and_then(|ancestors| { + if ancestors.current_ancestor_depth == 0 { + return None; + } + ancestors.ancestors.get(ancestors.current_ancestor_depth) + }) + .copied() + .unwrap_or(id) + } pub fn selected_entry<'a>( &self, cx: &'a AppContext, @@ -1613,6 +1678,21 @@ impl ProjectPanel { Some((worktree.read(cx), entry)) } + /// Compared to selected_entry, this function resolves to the currently + /// selected subentry if dir auto-folding is enabled. 
+ fn selected_sub_entry<'a>( + &self, + cx: &'a AppContext, + ) -> Option<(&'a Worktree, &'a project::Entry)> { + let (worktree, mut entry) = self.selected_entry_handle(cx)?; + + let worktree = worktree.read(cx); + let resolved_id = self.resolve_entry(entry.id); + if resolved_id != entry.id { + entry = worktree.entry_for_id(resolved_id)?; + } + Some((worktree, entry)) + } fn selected_entry_handle<'a>( &self, cx: &'a AppContext, @@ -1655,6 +1735,7 @@ impl ProjectPanel { .and_then(|worktree| worktree.read(cx).root_entry()) .map(|entry| entry.id); + let old_ancestors = std::mem::take(&mut self.ancestors); self.visible_entries.clear(); for worktree in project.visible_worktrees(cx) { let snapshot = worktree.read(cx).snapshot(); @@ -1688,25 +1769,42 @@ impl ProjectPanel { let mut visible_worktree_entries = Vec::new(); let mut entry_iter = snapshot.entries(true, 0); + let mut auto_folded_ancestors = vec![]; while let Some(entry) = entry_iter.entry() { - if auto_collapse_dirs - && entry.kind.is_dir() - && !self.unfolded_dir_ids.contains(&entry.id) - { - if let Some(root_path) = snapshot.root_entry() { - let mut child_entries = snapshot.child_entries(&entry.path); - if let Some(child) = child_entries.next() { - if entry.path != root_path.path - && child_entries.next().is_none() - && child.kind.is_dir() - { - entry_iter.advance(); - continue; + if auto_collapse_dirs && entry.kind.is_dir() { + auto_folded_ancestors.push(entry.id); + if !self.unfolded_dir_ids.contains(&entry.id) { + if let Some(root_path) = snapshot.root_entry() { + let mut child_entries = snapshot.child_entries(&entry.path); + if let Some(child) = child_entries.next() { + if entry.path != root_path.path + && child_entries.next().is_none() + && child.kind.is_dir() + { + entry_iter.advance(); + + continue; + } } } } + let depth = old_ancestors + .get(&entry.id) + .map(|ancestor| ancestor.current_ancestor_depth) + .unwrap_or_default(); + let mut ancestors = std::mem::take(&mut auto_folded_ancestors); + if 
ancestors.len() > 1 { + ancestors.reverse(); + self.ancestors.insert( + entry.id, + FoldedAncestors { + current_ancestor_depth: depth, + ancestors, + }, + ); + } } - + auto_folded_ancestors.clear(); visible_worktree_entries.push(entry.clone()); if Some(entry.id) == new_entry_parent_id { visible_worktree_entries.push(Entry { @@ -1999,6 +2097,7 @@ impl ProjectPanel { .map_or(false, |e| e.is_cut() && e.items().contains(&selection)), git_status: status, is_private: entry.is_private, + is_auto_folded: difference > 1, worktree_id: *worktree_id, canonical_path: entry.canonical_path.clone(), }; @@ -2008,6 +2107,15 @@ impl ProjectPanel { entry.id == NEW_ENTRY_ID } else { entry.id == edit_state.entry_id + || self + .ancestors + .get(&entry.id) + .is_some_and(|auto_folded_dirs| { + auto_folded_dirs + .ancestors + .iter() + .any(|entry_id| *entry_id == edit_state.entry_id) + }) }; if is_edited_entry { @@ -2102,6 +2210,7 @@ impl ProjectPanel { active_selection: selection, marked_selections: selections, }; + let is_auto_folded = details.is_auto_folded; div() .id(entry_id.to_proto() as usize) .on_drag_move::(cx.listener( @@ -2202,11 +2311,78 @@ impl ProjectPanel { if let (Some(editor), true) = (Some(&self.filename_editor), show_editor) { h_flex().h_6().w_full().child(editor.clone()) } else { - h_flex().h_6().child( - Label::new(file_name) - .single_line() - .color(filename_text_color), - ) + h_flex().h_6().map(|this| { + if is_auto_folded && is_active { + let folded_ancestors = self.ancestors.get(&entry_id).unwrap(); + let Some(part_to_highlight) = Path::new(&file_name) + .ancestors() + .nth(folded_ancestors.current_ancestor_depth) + else { + return this; + }; + + let suffix = Path::new(&file_name) + .strip_prefix(part_to_highlight) + .ok() + .filter(|suffix| !suffix.as_os_str().is_empty()); + let prefix = part_to_highlight + .parent() + .filter(|prefix| !prefix.as_os_str().is_empty()); + let Some(part_to_highlight) = part_to_highlight + .file_name() + .and_then(|name| 
name.to_str().map(String::from)) + else { + return this; + }; + + this.children(prefix.and_then(|prefix| { + Some( + h_flex() + .child( + Label::new(prefix.to_str().map(String::from)?) + .single_line() + .color(filename_text_color), + ) + .child( + Label::new(std::path::MAIN_SEPARATOR_STR) + .single_line() + .color(filename_text_color), + ), + ) + })) + .child( + Label::new(part_to_highlight) + .single_line() + .color(filename_text_color) + .underline(true), + ) + .children( + suffix.and_then(|suffix| { + Some( + h_flex() + .child( + Label::new(std::path::MAIN_SEPARATOR_STR) + .single_line() + .color(filename_text_color), + ) + .child( + Label::new( + suffix.to_str().map(String::from)?, + ) + .single_line() + .color(filename_text_color), + ), + ) + }), + ) + } else { + this.child( + Label::new(file_name) + .single_line() + .color(filename_text_color), + ) + } + }) } .ml_1(), ) @@ -2551,7 +2727,7 @@ impl Render for ProjectPanel { .child( uniform_list(cx.view().clone(), "entries", items_count, { |this, range, cx| { - let mut items = Vec::new(); + let mut items = Vec::with_capacity(range.end - range.start); this.for_each_visible_entry(range, cx, |id, details, cx| { items.push(this.render_entry(id, details, cx)); }); diff --git a/crates/ui/src/components/label/highlighted_label.rs b/crates/ui/src/components/label/highlighted_label.rs index ab71ffdc4c7204..6b170bb810f42b 100644 --- a/crates/ui/src/components/label/highlighted_label.rs +++ b/crates/ui/src/components/label/highlighted_label.rs @@ -58,6 +58,11 @@ impl LabelCommon for HighlightedLabel { self.base = self.base.alpha(alpha); self } + + fn underline(mut self, underline: bool) -> Self { + self.base = self.base.underline(underline); + self + } } pub fn highlight_ranges( diff --git a/crates/ui/src/components/label/label.rs b/crates/ui/src/components/label/label.rs index f29e4656e933ca..898a59de775d9e 100644 --- a/crates/ui/src/components/label/label.rs +++ b/crates/ui/src/components/label/label.rs @@ -170,6 
+170,11 @@ impl LabelCommon for Label { self.base = self.base.alpha(alpha); self } + + fn underline(mut self, underline: bool) -> Self { + self.base = self.base.underline(underline); + self + } } impl RenderOnce for Label { diff --git a/crates/ui/src/components/label/label_like.rs b/crates/ui/src/components/label/label_like.rs index a59c93e31d6c5d..bc2fae15a7fdc4 100644 --- a/crates/ui/src/components/label/label_like.rs +++ b/crates/ui/src/components/label/label_like.rs @@ -1,4 +1,4 @@ -use gpui::{relative, AnyElement, FontWeight, StyleRefinement, Styled}; +use gpui::{relative, AnyElement, FontWeight, StyleRefinement, Styled, UnderlineStyle}; use settings::Settings; use smallvec::SmallVec; use theme::ThemeSettings; @@ -42,6 +42,9 @@ pub trait LabelCommon { /// Sets the italic property of the label. fn italic(self, italic: bool) -> Self; + /// Sets the underline property of the label + fn underline(self, underline: bool) -> Self; + /// Sets the alpha property of the label, overwriting the alpha value of the color. 
fn alpha(self, alpha: f32) -> Self; } @@ -57,6 +60,7 @@ pub struct LabelLike { italic: bool, children: SmallVec<[AnyElement; 2]>, alpha: Option, + underline: bool, } impl Default for LabelLike { @@ -77,6 +81,7 @@ impl LabelLike { italic: false, children: SmallVec::new(), alpha: None, + underline: false, } } } @@ -123,6 +128,11 @@ impl LabelCommon for LabelLike { self } + fn underline(mut self, underline: bool) -> Self { + self.underline = underline; + self + } + fn alpha(mut self, alpha: f32) -> Self { self.alpha = Some(alpha); self @@ -165,6 +175,16 @@ impl RenderOnce for LabelLike { this.line_height(relative(1.)) }) .when(self.italic, |this| this.italic()) + .when(self.underline, |mut this| { + this.text_style() + .get_or_insert_with(Default::default) + .underline = Some(UnderlineStyle { + thickness: px(1.), + color: None, + wavy: false, + }); + this + }) .text_color(color) .font_weight(self.weight.unwrap_or(settings.ui_font.weight)) .children(self.children) From fcf79c0f1d6ec5153fef7ee3191450e50749dff9 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 9 Sep 2024 15:01:26 +0200 Subject: [PATCH 002/762] assistant: Support copy/pasting creases (#17490) https://github.com/user-attachments/assets/78a2572d-8e8f-4206-9680-dcd884e7bbbd Release Notes: - Added support for copying and pasting slash commands in the assistant panel --------- Co-authored-by: Thorsten --- crates/assistant/src/assistant_panel.rs | 233 ++++++++++++++++---- crates/editor/src/display_map/crease_map.rs | 92 +++++++- 2 files changed, 280 insertions(+), 45 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index fd5f62e1881f99..0828b9b9916725 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -26,8 +26,8 @@ use collections::{BTreeSet, HashMap, HashSet}; use editor::{ actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt}, display_map::{ - BlockDisposition, BlockId, 
BlockProperties, BlockStyle, Crease, CustomBlockId, FoldId, - RenderBlock, ToDisplayPoint, + BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease, CreaseMetadata, + CustomBlockId, FoldId, RenderBlock, ToDisplayPoint, }, scroll::{Autoscroll, AutoscrollStrategy, ScrollAnchor}, Anchor, Editor, EditorEvent, ExcerptRange, MultiBuffer, RowExt, ToOffset as _, ToPoint, @@ -54,13 +54,13 @@ use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; use project::{Project, ProjectLspAdapterDelegate, Worktree}; use search::{buffer_search::DivRegistrar, BufferSearchBar}; +use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings}; use smol::stream::StreamExt; use std::{ borrow::Cow, cmp, collections::hash_map, - fmt::Write, ops::{ControlFlow, Range}, path::PathBuf, sync::Arc, @@ -2491,20 +2491,26 @@ impl ContextEditor { .unwrap(); let buffer_row = MultiBufferRow(start.to_point(&buffer).row); buffer_rows_to_fold.insert(buffer_row); - creases.push(Crease::new( - start..end, - FoldPlaceholder { - render: render_fold_icon_button( - cx.view().downgrade(), - section.icon, - section.label.clone(), - ), - constrain_width: false, - merge_adjacent: false, - }, - render_slash_command_output_toggle, - |_, _, _| Empty.into_any_element(), - )); + creases.push( + Crease::new( + start..end, + FoldPlaceholder { + render: render_fold_icon_button( + cx.view().downgrade(), + section.icon, + section.label.clone(), + ), + constrain_width: false, + merge_adjacent: false, + }, + render_slash_command_output_toggle, + |_, _, _| Empty.into_any_element(), + ) + .with_metadata(CreaseMetadata { + icon: section.icon, + label: section.label, + }), + ); } editor.insert_creases(creases, cx); @@ -3318,39 +3324,113 @@ impl ContextEditor { } fn copy(&mut self, _: &editor::actions::Copy, cx: &mut ViewContext) { - let editor = self.editor.read(cx); - let context = self.context.read(cx); - if editor.selections.count() == 1 { - let selection = 
editor.selections.newest::(cx); - let mut copied_text = String::new(); - let mut spanned_messages = 0; - for message in context.messages(cx) { - if message.offset_range.start >= selection.range().end { - break; - } else if message.offset_range.end >= selection.range().start { - let range = cmp::max(message.offset_range.start, selection.range().start) - ..cmp::min(message.offset_range.end, selection.range().end); - if !range.is_empty() { - spanned_messages += 1; - write!(&mut copied_text, "## {}\n\n", message.role).unwrap(); - for chunk in context.buffer().read(cx).text_for_range(range) { - copied_text.push_str(chunk); + if self.editor.read(cx).selections.count() == 1 { + let (copied_text, metadata) = self.get_clipboard_contents(cx); + cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( + copied_text, + metadata, + )); + cx.stop_propagation(); + return; + } + + cx.propagate(); + } + + fn cut(&mut self, _: &editor::actions::Cut, cx: &mut ViewContext) { + if self.editor.read(cx).selections.count() == 1 { + let (copied_text, metadata) = self.get_clipboard_contents(cx); + + self.editor.update(cx, |editor, cx| { + let selections = editor.selections.all::(cx); + + editor.transact(cx, |this, cx| { + this.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.select(selections); + }); + this.insert("", cx); + cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( + copied_text, + metadata, + )); + }); + }); + + cx.stop_propagation(); + return; + } + + cx.propagate(); + } + + fn get_clipboard_contents(&mut self, cx: &mut ViewContext) -> (String, CopyMetadata) { + let creases = self.editor.update(cx, |editor, cx| { + let selection = editor.selections.newest::(cx); + let selection_start = editor.selections.newest::(cx).start; + let snapshot = editor.buffer().read(cx).snapshot(cx); + editor.display_map.update(cx, |display_map, cx| { + display_map + .snapshot(cx) + .crease_snapshot + .creases_in_range( + 
MultiBufferRow(selection.start.row)..MultiBufferRow(selection.end.row + 1), + &snapshot, + ) + .filter_map(|crease| { + if let Some(metadata) = &crease.metadata { + let start = crease + .range + .start + .to_offset(&snapshot) + .saturating_sub(selection_start); + let end = crease + .range + .end + .to_offset(&snapshot) + .saturating_sub(selection_start); + + let range_relative_to_selection = start..end; + + if range_relative_to_selection.is_empty() { + None + } else { + Some(SelectedCreaseMetadata { + range_relative_to_selection, + crease: metadata.clone(), + }) + } + } else { + None } - copied_text.push('\n'); + }) + .collect::>() + }) + }); + + let context = self.context.read(cx); + let selection = self.editor.read(cx).selections.newest::(cx); + let mut text = String::new(); + for message in context.messages(cx) { + if message.offset_range.start >= selection.range().end { + break; + } else if message.offset_range.end >= selection.range().start { + let range = cmp::max(message.offset_range.start, selection.range().start) + ..cmp::min(message.offset_range.end, selection.range().end); + if !range.is_empty() { + for chunk in context.buffer().read(cx).text_for_range(range) { + text.push_str(chunk); } + text.push('\n'); } } - - if spanned_messages > 1 { - cx.write_to_clipboard(ClipboardItem::new_string(copied_text)); - return; - } } - cx.propagate(); + (text, CopyMetadata { creases }) } - fn paste(&mut self, _: &editor::actions::Paste, cx: &mut ViewContext) { + fn paste(&mut self, action: &editor::actions::Paste, cx: &mut ViewContext) { + cx.stop_propagation(); + let images = if let Some(item) = cx.read_from_clipboard() { item.into_entries() .filter_map(|entry| { @@ -3365,9 +3445,62 @@ impl ContextEditor { Vec::new() }; + let metadata = if let Some(item) = cx.read_from_clipboard() { + item.entries().first().and_then(|entry| { + if let ClipboardEntry::String(text) = entry { + text.metadata_json::() + } else { + None + } + }) + } else { + None + }; + if images.is_empty() 
{ - // If we didn't find any valid image data to paste, propagate to let normal pasting happen. - cx.propagate(); + self.editor.update(cx, |editor, cx| { + let paste_position = editor.selections.newest::(cx).head(); + editor.paste(action, cx); + + if let Some(metadata) = metadata { + let buffer = editor.buffer().read(cx).snapshot(cx); + + let mut buffer_rows_to_fold = BTreeSet::new(); + let weak_editor = cx.view().downgrade(); + editor.insert_creases( + metadata.creases.into_iter().map(|metadata| { + let start = buffer.anchor_after( + paste_position + metadata.range_relative_to_selection.start, + ); + let end = buffer.anchor_before( + paste_position + metadata.range_relative_to_selection.end, + ); + + let buffer_row = MultiBufferRow(start.to_point(&buffer).row); + buffer_rows_to_fold.insert(buffer_row); + Crease::new( + start..end, + FoldPlaceholder { + constrain_width: false, + render: render_fold_icon_button( + weak_editor.clone(), + metadata.crease.icon, + metadata.crease.label.clone(), + ), + merge_adjacent: false, + }, + render_slash_command_output_toggle, + |_, _, _| Empty.into_any(), + ) + .with_metadata(metadata.crease.clone()) + }), + cx, + ); + for buffer_row in buffer_rows_to_fold.into_iter().rev() { + editor.fold_at(&FoldAt { buffer_row }, cx); + } + } + }); } else { let mut image_positions = Vec::new(); self.editor.update(cx, |editor, cx| { @@ -4037,6 +4170,17 @@ fn render_fold_icon_button( }) } +#[derive(Debug, Clone, Serialize, Deserialize)] +struct CopyMetadata { + creases: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct SelectedCreaseMetadata { + range_relative_to_selection: Range, + crease: CreaseMetadata, +} + impl EventEmitter for ContextEditor {} impl EventEmitter for ContextEditor {} @@ -4062,6 +4206,7 @@ impl Render for ContextEditor { .capture_action(cx.listener(ContextEditor::cancel)) .capture_action(cx.listener(ContextEditor::save)) .capture_action(cx.listener(ContextEditor::copy)) + 
.capture_action(cx.listener(ContextEditor::cut)) .capture_action(cx.listener(ContextEditor::paste)) .capture_action(cx.listener(ContextEditor::cycle_message_role)) .capture_action(cx.listener(ContextEditor::confirm_command)) diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index 9aa2728dca8c45..10ee125b3237ac 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -1,10 +1,11 @@ use collections::HashMap; use gpui::{AnyElement, IntoElement}; use multi_buffer::{Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, ToPoint}; +use serde::{Deserialize, Serialize}; use std::{cmp::Ordering, ops::Range, sync::Arc}; use sum_tree::{Bias, SeekTarget, SumTree}; use text::Point; -use ui::WindowContext; +use ui::{IconName, SharedString, WindowContext}; use crate::FoldPlaceholder; @@ -49,6 +50,31 @@ impl CreaseSnapshot { None } + pub fn creases_in_range<'a>( + &'a self, + range: Range, + snapshot: &'a MultiBufferSnapshot, + ) -> impl '_ + Iterator { + let start = snapshot.anchor_before(Point::new(range.start.0, 0)); + let mut cursor = self.creases.cursor::(); + cursor.seek(&start, Bias::Left, snapshot); + + std::iter::from_fn(move || { + while let Some(item) = cursor.item() { + cursor.next(snapshot); + let crease_start = item.crease.range.start.to_point(snapshot); + let crease_end = item.crease.range.end.to_point(snapshot); + if crease_end.row > range.end.0 { + continue; + } + if crease_start.row >= range.start.0 && crease_end.row < range.end.0 { + return Some(&item.crease); + } + } + None + }) + } + pub fn crease_items_with_offsets( &self, snapshot: &MultiBufferSnapshot, @@ -87,6 +113,14 @@ pub struct Crease { pub placeholder: FoldPlaceholder, pub render_toggle: RenderToggleFn, pub render_trailer: RenderTrailerFn, + pub metadata: Option, +} + +/// Metadata about a [`Crease`], that is used for serialization. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct CreaseMetadata { + pub icon: IconName, + pub label: SharedString, } impl Crease { @@ -124,8 +158,14 @@ impl Crease { render_trailer: Arc::new(move |row, folded, cx| { render_trailer(row, folded, cx).into_any_element() }), + metadata: None, } } + + pub fn with_metadata(mut self, metadata: CreaseMetadata) -> Self { + self.metadata = Some(metadata); + self + } } impl std::fmt::Debug for Crease { @@ -304,4 +344,54 @@ mod test { .query_row(MultiBufferRow(3), &snapshot) .is_none()); } + + #[gpui::test] + fn test_creases_in_range(cx: &mut AppContext) { + let text = "line1\nline2\nline3\nline4\nline5\nline6\nline7"; + let buffer = MultiBuffer::build_simple(text, cx); + let snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); + let mut crease_map = CreaseMap::default(); + + let creases = [ + Crease::new( + snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 5)), + FoldPlaceholder::test(), + |_row, _folded, _toggle, _cx| div(), + |_row, _folded, _cx| div(), + ), + Crease::new( + snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_after(Point::new(3, 5)), + FoldPlaceholder::test(), + |_row, _folded, _toggle, _cx| div(), + |_row, _folded, _cx| div(), + ), + Crease::new( + snapshot.anchor_before(Point::new(5, 0))..snapshot.anchor_after(Point::new(5, 5)), + FoldPlaceholder::test(), + |_row, _folded, _toggle, _cx| div(), + |_row, _folded, _cx| div(), + ), + ]; + crease_map.insert(creases, &snapshot); + + let crease_snapshot = crease_map.snapshot(); + + let range = MultiBufferRow(0)..MultiBufferRow(7); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 3); + + let range = MultiBufferRow(2)..MultiBufferRow(5); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 1); + assert_eq!(creases[0].range.start.to_point(&snapshot).row, 3); + + let 
range = MultiBufferRow(0)..MultiBufferRow(2); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 1); + assert_eq!(creases[0].range.start.to_point(&snapshot).row, 1); + + let range = MultiBufferRow(6)..MultiBufferRow(7); + let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect(); + assert_eq!(creases.len(), 0); + } } From dd257b8412c3c8005c710185dc8fc2eed3cc67ba Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:14:07 +0200 Subject: [PATCH 003/762] project panel: Do not allow collapsing auto-folded directory past the list of ancestors (#17594) Closes #ISSUE Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index be945cde773bbc..9add77c864db97 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -637,7 +637,9 @@ impl ProjectPanel { fn collapse_selected_entry(&mut self, _: &CollapseSelectedEntry, cx: &mut ViewContext) { if let Some((worktree, mut entry)) = self.selected_entry(cx) { if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) { - if folded_ancestors.current_ancestor_depth < folded_ancestors.max_ancestor_depth() { + if folded_ancestors.current_ancestor_depth + 1 + < folded_ancestors.max_ancestor_depth() + { folded_ancestors.current_ancestor_depth += 1; cx.notify(); return; From 12dde176084c9950b78093c1e7fb13e5988292ba Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 9 Sep 2024 18:31:55 +0200 Subject: [PATCH 004/762] assistant panel: automatically insert selections (#17589) Addresses parts of feedback from https://www.jacobcolling.com/friction-log/zed-friction-log Release Notes: - "Assistant::NewContext" now automatically 
does quote selection as well - "Assistant::QuoteSelection" now handles multicursor selections, inserting multiple excerpts. --- crates/assistant/src/assistant_panel.rs | 226 +++++++++++++----------- 1 file changed, 120 insertions(+), 106 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 0828b9b9916725..82888b498a3c80 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -939,9 +939,16 @@ impl AssistantPanel { cx: &mut ViewContext, ) { if let Some(panel) = workspace.panel::(cx) { - panel.update(cx, |panel, cx| { - panel.new_context(cx); - }); + let did_create_context = panel + .update(cx, |panel, cx| { + panel.new_context(cx)?; + + Some(()) + }) + .is_some(); + if did_create_context { + ContextEditor::quote_selection(workspace, &Default::default(), cx); + } } } @@ -3186,87 +3193,93 @@ impl ContextEditor { return; }; - let selection = editor.update(cx, |editor, cx| editor.selections.newest_adjusted(cx)); - let editor = editor.read(cx); - let buffer = editor.buffer().read(cx).snapshot(cx); - let range = editor::ToOffset::to_offset(&selection.start, &buffer) - ..editor::ToOffset::to_offset(&selection.end, &buffer); - let selected_text = buffer.text_for_range(range.clone()).collect::(); - if selected_text.is_empty() { - return; - } - - let start_language = buffer.language_at(range.start); - let end_language = buffer.language_at(range.end); - let language_name = if start_language == end_language { - start_language.map(|language| language.code_fence_block_name()) - } else { - None - }; - let language_name = language_name.as_deref().unwrap_or(""); - - let filename = buffer - .file_at(selection.start) - .map(|file| file.full_path(cx)); - - let text = if language_name == "markdown" { - selected_text - .lines() - .map(|line| format!("> {}", line)) - .collect::>() - .join("\n") - } else { - let start_symbols = buffer - .symbols_containing(selection.start, None) - 
.map(|(_, symbols)| symbols); - let end_symbols = buffer - .symbols_containing(selection.end, None) - .map(|(_, symbols)| symbols); - - let outline_text = - if let Some((start_symbols, end_symbols)) = start_symbols.zip(end_symbols) { - Some( - start_symbols - .into_iter() - .zip(end_symbols) - .take_while(|(a, b)| a == b) - .map(|(a, _)| a.text) - .collect::>() - .join(" > "), - ) + let mut creases = vec![]; + editor.update(cx, |editor, cx| { + let selections = editor.selections.all_adjusted(cx); + let buffer = editor.buffer().read(cx).snapshot(cx); + for selection in selections { + let range = editor::ToOffset::to_offset(&selection.start, &buffer) + ..editor::ToOffset::to_offset(&selection.end, &buffer); + let selected_text = buffer.text_for_range(range.clone()).collect::(); + if selected_text.is_empty() { + continue; + } + let start_language = buffer.language_at(range.start); + let end_language = buffer.language_at(range.end); + let language_name = if start_language == end_language { + start_language.map(|language| language.code_fence_block_name()) } else { None }; + let language_name = language_name.as_deref().unwrap_or(""); + let filename = buffer + .file_at(selection.start) + .map(|file| file.full_path(cx)); + let text = if language_name == "markdown" { + selected_text + .lines() + .map(|line| format!("> {}", line)) + .collect::>() + .join("\n") + } else { + let start_symbols = buffer + .symbols_containing(selection.start, None) + .map(|(_, symbols)| symbols); + let end_symbols = buffer + .symbols_containing(selection.end, None) + .map(|(_, symbols)| symbols); + + let outline_text = if let Some((start_symbols, end_symbols)) = + start_symbols.zip(end_symbols) + { + Some( + start_symbols + .into_iter() + .zip(end_symbols) + .take_while(|(a, b)| a == b) + .map(|(a, _)| a.text) + .collect::>() + .join(" > "), + ) + } else { + None + }; - let line_comment_prefix = start_language - .and_then(|l| l.default_scope().line_comment_prefixes().first().cloned()); - - let 
fence = codeblock_fence_for_path( - filename.as_deref(), - Some(selection.start.row..selection.end.row), - ); + let line_comment_prefix = start_language + .and_then(|l| l.default_scope().line_comment_prefixes().first().cloned()); - if let Some((line_comment_prefix, outline_text)) = line_comment_prefix.zip(outline_text) - { - let breadcrumb = format!("{line_comment_prefix}Excerpt from: {outline_text}\n"); - format!("{fence}{breadcrumb}{selected_text}\n```") - } else { - format!("{fence}{selected_text}\n```") - } - }; + let fence = codeblock_fence_for_path( + filename.as_deref(), + Some(selection.start.row..selection.end.row), + ); - let crease_title = if let Some(path) = filename { - let start_line = selection.start.row + 1; - let end_line = selection.end.row + 1; - if start_line == end_line { - format!("{}, Line {}", path.display(), start_line) - } else { - format!("{}, Lines {} to {}", path.display(), start_line, end_line) + if let Some((line_comment_prefix, outline_text)) = + line_comment_prefix.zip(outline_text) + { + let breadcrumb = + format!("{line_comment_prefix}Excerpt from: {outline_text}\n"); + format!("{fence}{breadcrumb}{selected_text}\n```") + } else { + format!("{fence}{selected_text}\n```") + } + }; + let crease_title = if let Some(path) = filename { + let start_line = selection.start.row + 1; + let end_line = selection.end.row + 1; + if start_line == end_line { + format!("{}, Line {}", path.display(), start_line) + } else { + format!("{}, Lines {} to {}", path.display(), start_line, end_line) + } + } else { + "Quoted selection".to_string() + }; + creases.push((text, crease_title)); } - } else { - "Quoted selection".to_string() - }; - + }); + if creases.is_empty() { + return; + } // Activate the panel if !panel.focus_handle(cx).contains_focused(cx) { workspace.toggle_panel_focus::(cx); @@ -3283,39 +3296,40 @@ impl ContextEditor { context.update(cx, |context, cx| { context.editor.update(cx, |editor, cx| { editor.insert("\n", cx); + for (text, 
crease_title) in creases { + let point = editor.selections.newest::(cx).head(); + let start_row = MultiBufferRow(point.row); - let point = editor.selections.newest::(cx).head(); - let start_row = MultiBufferRow(point.row); + editor.insert(&text, cx); - editor.insert(&text, cx); + let snapshot = editor.buffer().read(cx).snapshot(cx); + let anchor_before = snapshot.anchor_after(point); + let anchor_after = editor + .selections + .newest_anchor() + .head() + .bias_left(&snapshot); - let snapshot = editor.buffer().read(cx).snapshot(cx); - let anchor_before = snapshot.anchor_after(point); - let anchor_after = editor - .selections - .newest_anchor() - .head() - .bias_left(&snapshot); + editor.insert("\n", cx); - editor.insert("\n", cx); - - let fold_placeholder = quote_selection_fold_placeholder( - crease_title, - cx.view().downgrade(), - ); - let crease = Crease::new( - anchor_before..anchor_after, - fold_placeholder, - render_quote_selection_output_toggle, - |_, _, _| Empty.into_any(), - ); - editor.insert_creases(vec![crease], cx); - editor.fold_at( - &FoldAt { - buffer_row: start_row, - }, - cx, - ); + let fold_placeholder = quote_selection_fold_placeholder( + crease_title, + cx.view().downgrade(), + ); + let crease = Crease::new( + anchor_before..anchor_after, + fold_placeholder, + render_quote_selection_output_toggle, + |_, _, _| Empty.into_any(), + ); + editor.insert_creases(vec![crease], cx); + editor.fold_at( + &FoldAt { + buffer_row: start_row, + }, + cx, + ); + } }) }); }; From 59be07ad90206fdba94dbdc66154831b24b9478a Mon Sep 17 00:00:00 2001 From: Fernando Tagawa Date: Mon, 9 Sep 2024 18:27:45 -0300 Subject: [PATCH 005/762] x11: Implement Drag and Drop (#17491) Closes #16225 Release Notes: - x11: Implemented Drag and Drop. 
--- crates/gpui/src/platform/linux/x11/client.rs | 210 ++++++++++++++++++- crates/gpui/src/platform/linux/x11/window.rs | 17 ++ typos.toml | 4 +- 3 files changed, 226 insertions(+), 5 deletions(-) diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index f127d312d24bb9..0909d09f25438b 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -1,3 +1,4 @@ +use core::str; use std::cell::RefCell; use std::collections::HashSet; use std::ops::Deref; @@ -9,6 +10,8 @@ use calloop::generic::{FdWrapper, Generic}; use calloop::{EventLoop, LoopHandle, RegistrationToken}; use collections::HashMap; +use http_client::Url; +use smallvec::SmallVec; use util::ResultExt; use x11rb::connection::{Connection, RequestConnection}; @@ -17,9 +20,13 @@ use x11rb::errors::ConnectionError; use x11rb::protocol::randr::ConnectionExt as _; use x11rb::protocol::xinput::ConnectionExt; use x11rb::protocol::xkb::ConnectionExt as _; -use x11rb::protocol::xproto::{ChangeWindowAttributesAux, ConnectionExt as _, KeyPressEvent}; +use x11rb::protocol::xproto::{ + AtomEnum, ChangeWindowAttributesAux, ClientMessageData, ClientMessageEvent, ConnectionExt as _, + EventMask, KeyPressEvent, +}; use x11rb::protocol::{randr, render, xinput, xkb, xproto, Event}; use x11rb::resource_manager::Database; +use x11rb::wrapper::ConnectionExt as _; use x11rb::xcb_ffi::XCBConnection; use xim::{x11rb::X11rbClient, Client}; use xim::{AttributeName, InputStyle}; @@ -30,8 +37,8 @@ use crate::platform::linux::LinuxClient; use crate::platform::{LinuxCommon, PlatformWindow}; use crate::{ modifiers_from_xinput_info, point, px, AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, - DisplayId, Keystroke, Modifiers, ModifiersChangedEvent, Pixels, Platform, PlatformDisplay, - PlatformInput, Point, ScrollDelta, Size, TouchPhase, WindowParams, X11Window, + DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, 
Pixels, Platform, + PlatformDisplay, PlatformInput, Point, ScrollDelta, Size, TouchPhase, WindowParams, X11Window, }; use super::{button_of_key, modifiers_from_state, pressed_button_from_mask}; @@ -101,6 +108,14 @@ struct XKBStateNotiy { locked_layout: LayoutIndex, } +#[derive(Debug, Default)] +pub struct Xdnd { + other_window: xproto::Window, + drag_type: u32, + retrieved: bool, + position: Point, +} + pub struct X11ClientState { pub(crate) loop_handle: LoopHandle<'static, X11Client>, pub(crate) event_loop: Option>, @@ -142,6 +157,7 @@ pub struct X11ClientState { pub(crate) common: LinuxCommon, pub(crate) clipboard: x11_clipboard::Clipboard, pub(crate) clipboard_item: Option, + pub(crate) xdnd_state: Xdnd, } #[derive(Clone)] @@ -423,6 +439,7 @@ impl X11Client { clipboard, clipboard_item: None, + xdnd_state: Xdnd::default(), }))) } @@ -611,7 +628,7 @@ impl X11Client { match event { Event::ClientMessage(event) => { let window = self.get_window(event.window)?; - let [atom, _arg1, arg2, arg3, _arg4] = event.data.as_data32(); + let [atom, arg1, arg2, arg3, arg4] = event.data.as_data32(); let mut state = self.0.borrow_mut(); if atom == state.atoms.WM_DELETE_WINDOW { @@ -627,6 +644,106 @@ impl X11Client { hi: arg3 as i32, }) } + + if event.type_ == state.atoms.XdndEnter { + state.xdnd_state.other_window = atom; + if (arg1 & 0x1) == 0x1 { + state.xdnd_state.drag_type = xdnd_get_supported_atom( + &state.xcb_connection, + &state.atoms, + state.xdnd_state.other_window, + ); + } else { + if let Some(atom) = [arg2, arg3, arg4] + .into_iter() + .find(|atom| xdnd_is_atom_supported(*atom, &state.atoms)) + { + state.xdnd_state.drag_type = atom; + } + } + } else if event.type_ == state.atoms.XdndLeave { + window.handle_input(PlatformInput::FileDrop(FileDropEvent::Pending { + position: state.xdnd_state.position, + })); + window.handle_input(PlatformInput::FileDrop(FileDropEvent::Exited {})); + state.xdnd_state = Xdnd::default(); + } else if event.type_ == state.atoms.XdndPosition { 
+ if let Ok(pos) = state + .xcb_connection + .query_pointer(event.window) + .unwrap() + .reply() + { + state.xdnd_state.position = + Point::new(Pixels(pos.win_x as f32), Pixels(pos.win_y as f32)); + } + if !state.xdnd_state.retrieved { + state + .xcb_connection + .convert_selection( + event.window, + state.atoms.XdndSelection, + state.xdnd_state.drag_type, + state.atoms.XDND_DATA, + arg3, + ) + .unwrap(); + } + xdnd_send_status( + &state.xcb_connection, + &state.atoms, + event.window, + state.xdnd_state.other_window, + arg4, + ); + window.handle_input(PlatformInput::FileDrop(FileDropEvent::Pending { + position: state.xdnd_state.position, + })); + } else if event.type_ == state.atoms.XdndDrop { + xdnd_send_finished( + &state.xcb_connection, + &state.atoms, + event.window, + state.xdnd_state.other_window, + ); + window.handle_input(PlatformInput::FileDrop(FileDropEvent::Submit { + position: state.xdnd_state.position, + })); + state.xdnd_state = Xdnd::default(); + } + } + Event::SelectionNotify(event) => { + let window = self.get_window(event.requestor)?; + let mut state = self.0.borrow_mut(); + let property = state.xcb_connection.get_property( + false, + event.requestor, + state.atoms.XDND_DATA, + AtomEnum::ANY, + 0, + 1024, + ); + if property.as_ref().log_err().is_none() { + return Some(()); + } + if let Ok(reply) = property.unwrap().reply() { + match str::from_utf8(&reply.value) { + Ok(file_list) => { + let paths: SmallVec<[_; 2]> = file_list + .lines() + .filter_map(|path| Url::parse(path).log_err()) + .filter_map(|url| url.to_file_path().log_err()) + .collect(); + let input = PlatformInput::FileDrop(FileDropEvent::Entered { + position: state.xdnd_state.position, + paths: crate::ExternalPaths(paths), + }); + window.handle_input(input); + state.xdnd_state.retrieved = true; + } + Err(_) => {} + } + } } Event::ConfigureNotify(event) => { let bounds = Bounds { @@ -1179,6 +1296,16 @@ impl LinuxClient for X11Client { state.scale_factor, state.common.appearance, )?; + 
state + .xcb_connection + .change_property32( + xproto::PropMode::REPLACE, + x_window, + state.atoms.XdndAware, + state.atoms.XA_ATOM, + &[5], + ) + .unwrap(); let screen_resources = state .xcb_connection @@ -1540,3 +1667,78 @@ fn check_gtk_frame_extents_supported( supported_atoms.contains(&atoms._GTK_FRAME_EXTENTS) } + +fn xdnd_is_atom_supported(atom: u32, atoms: &XcbAtoms) -> bool { + return atom == atoms.TEXT + || atom == atoms.STRING + || atom == atoms.UTF8_STRING + || atom == atoms.TEXT_PLAIN + || atom == atoms.TEXT_PLAIN_UTF8 + || atom == atoms.TextUriList; +} + +fn xdnd_get_supported_atom( + xcb_connection: &XCBConnection, + supported_atoms: &XcbAtoms, + target: xproto::Window, +) -> u32 { + let property = xcb_connection + .get_property( + false, + target, + supported_atoms.XdndTypeList, + AtomEnum::ANY, + 0, + 1024, + ) + .unwrap(); + if let Ok(reply) = property.reply() { + if let Some(atoms) = reply.value32() { + for atom in atoms { + if xdnd_is_atom_supported(atom, &supported_atoms) { + return atom; + } + } + } + } + return 0; +} + +fn xdnd_send_finished( + xcb_connection: &XCBConnection, + atoms: &XcbAtoms, + source: xproto::Window, + target: xproto::Window, +) { + let message = ClientMessageEvent { + format: 32, + window: target, + type_: atoms.XdndFinished, + data: ClientMessageData::from([source, 1, atoms.XdndActionCopy, 0, 0]), + sequence: 0, + response_type: xproto::CLIENT_MESSAGE_EVENT, + }; + xcb_connection + .send_event(false, target, EventMask::default(), message) + .unwrap(); +} + +fn xdnd_send_status( + xcb_connection: &XCBConnection, + atoms: &XcbAtoms, + source: xproto::Window, + target: xproto::Window, + action: u32, +) { + let message = ClientMessageEvent { + format: 32, + window: target, + type_: atoms.XdndStatus, + data: ClientMessageData::from([source, 1, 0, 0, action]), + sequence: 0, + response_type: xproto::CLIENT_MESSAGE_EVENT, + }; + xcb_connection + .send_event(false, target, EventMask::default(), message) + .unwrap(); +} diff 
--git a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index f1aa10f31152ea..b0cf82d605ffff 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -32,7 +32,24 @@ use std::{ use super::{X11Display, XINPUT_MASTER_DEVICE}; x11rb::atom_manager! { pub XcbAtoms: AtomsCookie { + XA_ATOM, + XdndAware, + XdndStatus, + XdndEnter, + XdndLeave, + XdndPosition, + XdndSelection, + XdndDrop, + XdndFinished, + XdndTypeList, + XdndActionCopy, + TextUriList: b"text/uri-list", UTF8_STRING, + TEXT, + STRING, + TEXT_PLAIN_UTF8: b"text/plain;charset=utf-8", + TEXT_PLAIN: b"text/plain", + XDND_DATA, WM_PROTOCOLS, WM_DELETE_WINDOW, WM_CHANGE_STATE, diff --git a/typos.toml b/typos.toml index 2bbb4907a72ad4..1b5c82b9069806 100644 --- a/typos.toml +++ b/typos.toml @@ -56,6 +56,8 @@ extend-ignore-re = [ "rename = \"sesssion_id\"", "doas", # ProtoLS crate with tree-sitter Protobuf grammar. - "protols" + "protols", + # x11rb SelectionNotifyEvent struct field + "requestor" ] check-filename = true From f92d0de58d1d14faf1eb1f68d8cdff3792613927 Mon Sep 17 00:00:00 2001 From: Taras Martyniuk Date: Tue, 10 Sep 2024 00:31:57 +0300 Subject: [PATCH 006/762] terraform: Update indents (#17200) Closes #15988 Fixed indent configuration for terraform/HCL Release Notes: - N/A https://github.com/user-attachments/assets/09b44ac9-ef09-463c-876d-0fbcdd1f09c9 --------- Co-authored-by: Marshall Bowers --- extensions/terraform/languages/hcl/config.toml | 1 + extensions/terraform/languages/hcl/indents.scm | 8 +++++--- extensions/terraform/languages/terraform-vars/config.toml | 1 + extensions/terraform/languages/terraform-vars/indents.scm | 8 +++++--- extensions/terraform/languages/terraform/indents.scm | 8 +++++--- 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/extensions/terraform/languages/hcl/config.toml b/extensions/terraform/languages/hcl/config.toml index 891b2f38d4182c..be7e601e014482 100644 
--- a/extensions/terraform/languages/hcl/config.toml +++ b/extensions/terraform/languages/hcl/config.toml @@ -12,3 +12,4 @@ brackets = [ { start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] }, { start = "/*", end = " */", close = true, newline = false, not_in = ["comment", "string"] }, ] +tab_size = 2 diff --git a/extensions/terraform/languages/hcl/indents.scm b/extensions/terraform/languages/hcl/indents.scm index 74edb66bdf28d8..be12af1d0cd686 100644 --- a/extensions/terraform/languages/hcl/indents.scm +++ b/extensions/terraform/languages/hcl/indents.scm @@ -6,6 +6,8 @@ (function_call) ] @indent -(_ "[" "]" @end) @indent -(_ "(" ")" @end) @indent -(_ "{" "}" @end) @indent +[ + "]" + "}" + ")" +] @outdent diff --git a/extensions/terraform/languages/terraform-vars/config.toml b/extensions/terraform/languages/terraform-vars/config.toml index 12ed7e236c4fea..4d803ee36e934d 100644 --- a/extensions/terraform/languages/terraform-vars/config.toml +++ b/extensions/terraform/languages/terraform-vars/config.toml @@ -12,3 +12,4 @@ brackets = [ { start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] }, { start = "/*", end = " */", close = true, newline = false, not_in = ["comment", "string"] }, ] +tab_size = 2 diff --git a/extensions/terraform/languages/terraform-vars/indents.scm b/extensions/terraform/languages/terraform-vars/indents.scm index 95ad93df1da98b..b9ba0ad56a3ed0 100644 --- a/extensions/terraform/languages/terraform-vars/indents.scm +++ b/extensions/terraform/languages/terraform-vars/indents.scm @@ -6,9 +6,11 @@ (function_call) ] @indent -(_ "[" "]" @end) @indent -(_ "(" ")" @end) @indent -(_ "{" "}" @end) @indent +[ + "]" + "}" + ")" +] @outdent ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/terraform/indents.scm ; inherits: hcl diff --git a/extensions/terraform/languages/terraform/indents.scm 
b/extensions/terraform/languages/terraform/indents.scm index 95ad93df1da98b..b9ba0ad56a3ed0 100644 --- a/extensions/terraform/languages/terraform/indents.scm +++ b/extensions/terraform/languages/terraform/indents.scm @@ -6,9 +6,11 @@ (function_call) ] @indent -(_ "[" "]" @end) @indent -(_ "(" ")" @end) @indent -(_ "{" "}" @end) @indent +[ + "]" + "}" + ")" +] @outdent ; https://github.com/nvim-treesitter/nvim-treesitter/blob/ce4adf11cfe36fc5b0e5bcdce0c7c6e8fbc9798a/queries/terraform/indents.scm ; inherits: hcl From c1193875e8e93746379514e9ce4fa3db5a0b6503 Mon Sep 17 00:00:00 2001 From: Sergio Nonide <60042926+senonide@users.noreply.github.com> Date: Mon, 9 Sep 2024 23:44:02 +0200 Subject: [PATCH 007/762] Fix blurry cursor on Wayland at a scale other than 100% (#17496) Closes #13258 Release Notes: - Fixed blurry mouse cursor on wayland when the screen scale is other than 100% Before: ![Screenshot from 2024-09-06 14-38-30](https://github.com/user-attachments/assets/e4553503-ecea-4b53-b80d-43732d34fa62) After: ![Screenshot from 2024-09-06 14-38-56](https://github.com/user-attachments/assets/ce563d3a-2b44-44b9-9f59-f0042609924e) --- .../gpui/src/platform/linux/wayland/client.rs | 3 +- .../gpui/src/platform/linux/wayland/cursor.rs | 33 +++++++++++++++---- 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 67cd1dcbd43b70..57c43a7e46a0c0 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -476,7 +476,8 @@ impl WaylandClient { .as_ref() .map(|primary_selection_manager| primary_selection_manager.get_device(&seat, &qh, ())); - let mut cursor = Cursor::new(&conn, &globals, 24); + // FIXME: Determine the scaling factor dynamically by the compositor + let mut cursor = Cursor::new(&conn, &globals, 24, 2); handle .insert_source(XDPEventSource::new(&common.background_executor), { diff --git 
a/crates/gpui/src/platform/linux/wayland/cursor.rs b/crates/gpui/src/platform/linux/wayland/cursor.rs index 6a527650429a4e..ea29eee73c71f4 100644 --- a/crates/gpui/src/platform/linux/wayland/cursor.rs +++ b/crates/gpui/src/platform/linux/wayland/cursor.rs @@ -11,6 +11,7 @@ pub(crate) struct Cursor { theme_name: Option, surface: WlSurface, size: u32, + scale: u32, shm: WlShm, connection: Connection, } @@ -23,7 +24,7 @@ impl Drop for Cursor { } impl Cursor { - pub fn new(connection: &Connection, globals: &Globals, size: u32) -> Self { + pub fn new(connection: &Connection, globals: &Globals, size: u32, scale: u32) -> Self { Self { theme: CursorTheme::load(&connection, globals.shm.clone(), size).log_err(), theme_name: None, @@ -31,6 +32,7 @@ impl Cursor { shm: globals.shm.clone(), connection: connection.clone(), size, + scale, } } @@ -38,14 +40,18 @@ impl Cursor { if let Some(size) = size { self.size = size; } - if let Some(theme) = - CursorTheme::load_from_name(&self.connection, self.shm.clone(), theme_name, self.size) - .log_err() + if let Some(theme) = CursorTheme::load_from_name( + &self.connection, + self.shm.clone(), + theme_name, + self.size * self.scale, + ) + .log_err() { self.theme = Some(theme); self.theme_name = Some(theme_name.to_string()); } else if let Some(theme) = - CursorTheme::load(&self.connection, self.shm.clone(), self.size).log_err() + CursorTheme::load(&self.connection, self.shm.clone(), self.size * self.scale).log_err() { self.theme = Some(theme); self.theme_name = None; @@ -91,9 +97,22 @@ impl Cursor { let (width, height) = buffer.dimensions(); let (hot_x, hot_y) = buffer.hotspot(); - wl_pointer.set_cursor(serial_id, Some(&self.surface), hot_x as i32, hot_y as i32); + let scaled_width = width / self.scale; + let scaled_height = height / self.scale; + let scaled_hot_x = hot_x / self.scale; + let scaled_hot_y = hot_y / self.scale; + + self.surface.set_buffer_scale(self.scale as i32); + + wl_pointer.set_cursor( + serial_id, + Some(&self.surface), 
+ scaled_hot_x as i32, + scaled_hot_y as i32, + ); self.surface.attach(Some(&buffer), 0, 0); - self.surface.damage(0, 0, width as i32, height as i32); + self.surface + .damage(0, 0, scaled_width as i32, scaled_height as i32); self.surface.commit(); } } else { From da9601c69851b2b9c2997611d4834307a2d6b5f9 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 9 Sep 2024 17:49:40 -0400 Subject: [PATCH 008/762] markdown: Handle definition lists in parser (#17617) Resolves https://github.com/zed-industries/zed/issues/17607. This PR makes it so the Markdown parser can handle Markdown containing definition lists. Note that this is just parser support, we aren't yet doing anything with the definition lists themselves. Release Notes: - N/A --- crates/markdown/src/parser.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index cb83b2356e61ea..3dd11be9838a91 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -232,6 +232,10 @@ pub enum MarkdownTag { /// A metadata block. 
MetadataBlock(MetadataBlockKind), + + DefinitionList, + DefinitionListTitle, + DefinitionListDefinition, } #[derive(Clone, Debug, PartialEq)] @@ -317,11 +321,9 @@ impl From> for MarkdownTag { }, pulldown_cmark::Tag::HtmlBlock => MarkdownTag::HtmlBlock, pulldown_cmark::Tag::MetadataBlock(kind) => MarkdownTag::MetadataBlock(kind), - pulldown_cmark::Tag::DefinitionList - | pulldown_cmark::Tag::DefinitionListTitle - | pulldown_cmark::Tag::DefinitionListDefinition => { - unimplemented!("definition lists are not yet supported") - } + pulldown_cmark::Tag::DefinitionList => MarkdownTag::DefinitionList, + pulldown_cmark::Tag::DefinitionListTitle => MarkdownTag::DefinitionListTitle, + pulldown_cmark::Tag::DefinitionListDefinition => MarkdownTag::DefinitionListDefinition, } } } From f71cb14d7a3c876b1a5a69949a5a198ed3bfbd5a Mon Sep 17 00:00:00 2001 From: ZZzzaaKK <66885975+ZZzzaaKK@users.noreply.github.com> Date: Mon, 9 Sep 2024 23:52:43 +0200 Subject: [PATCH 009/762] Add ',' to word chars for line wrapping (#17590) Closes #16407 Adds ',' to the is_word_char() matches for line wrapping, so that commas aren't wrapped to the start of a new line. Release Notes: - N/A --- crates/gpui/src/text_system/line_wrapper.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/gpui/src/text_system/line_wrapper.rs b/crates/gpui/src/text_system/line_wrapper.rs index 31e852afdf5178..3d38ca315c04ae 100644 --- a/crates/gpui/src/text_system/line_wrapper.rs +++ b/crates/gpui/src/text_system/line_wrapper.rs @@ -153,7 +153,7 @@ impl LineWrapper { matches!(c, '\u{0400}'..='\u{04FF}') || // Some other known special characters that should be treated as word characters, // e.g. `a-b`, `var_name`, `I'm`, '@mention`, `#hashtag`, `100%`, `3.1415`, `2^3`, `a~b`, etc. - matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~') || + matches!(c, '-' | '_' | '.' | '\'' | '$' | '%' | '@' | '#' | '^' | '~' | ',') || // Characters that used in URL, e.g. 
`https://github.com/zed-industries/zed?a=1&b=2` for better wrapping a long URL. matches!(c, '/' | ':' | '?' | '&' | '=') || // `⋯` character is special used in Zed, to keep this at the end of the line. From 2fc74a1b7132db1d9c20b1405b4bd3b9d030230f Mon Sep 17 00:00:00 2001 From: Kenichi Kamiya Date: Tue, 10 Sep 2024 19:50:43 +0900 Subject: [PATCH 010/762] Update doc comments with tabs.file_icons default (#17629) The diff only contains doc comments changes, however I expect this also fixes generating JSON Schema which generated by [schemars](https://github.com/GREsau/schemars/blob/092dc17ae4831d42974653588cebcc089d07493e/docs/examples/6-doc_comments.md). This default value is actually true at first. 1818fef32f24f24f082c6f34a4c3100add6d328c However, it was changed in the following commit. bf7e474bbcc2fadf002adb273e2584c77c1573e3 Closes #17628 Release Notes: - N/A --- crates/workspace/src/item.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 422ed1f165c73a..935f0268b62ffe 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -79,7 +79,7 @@ pub struct ItemSettingsContent { close_position: Option, /// Whether to show the file icon for a tab. 
/// - /// Default: true + /// Default: false file_icons: Option, } From 56bc3c36ad1140562c4ded64536c65f17e0d173f Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 12:55:46 +0200 Subject: [PATCH 011/762] project search: make sorting comparator comply with Ord preconditions (#17604) Closes #17493 /cc @SomeoneToIgnore /cc @ConradIrwin Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 38 +++++++++--------- crates/util/src/paths.rs | 42 +++++++++++--------- crates/util/src/util.rs | 48 +++++++++++------------ 3 files changed, 65 insertions(+), 63 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 9add77c864db97..32ccd47a89db2e 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -3519,9 +3519,9 @@ mod tests { " > .git", " > a", " v b", - " > [EDITOR: ''] <== selected", " > 3", " > 4", + " > [EDITOR: ''] <== selected", " a-different-filename.tar.gz", " > C", " .dockerignore", @@ -3542,10 +3542,10 @@ mod tests { " > .git", " > a", " v b", - " > [PROCESSING: 'new-dir']", - " > 3 <== selected", + " > 3", " > 4", - " a-different-filename.tar.gz", + " > [PROCESSING: 'new-dir']", + " a-different-filename.tar.gz <== selected", " > C", " .dockerignore", ] @@ -3559,10 +3559,10 @@ mod tests { " > .git", " > a", " v b", - " > 3 <== selected", + " > 3", " > 4", " > new-dir", - " a-different-filename.tar.gz", + " a-different-filename.tar.gz <== selected", " > C", " .dockerignore", ] @@ -3576,10 +3576,10 @@ mod tests { " > .git", " > a", " v b", - " > [EDITOR: '3'] <== selected", + " > 3", " > 4", " > new-dir", - " a-different-filename.tar.gz", + " [EDITOR: 'a-different-filename.tar.gz'] <== selected", " > C", " .dockerignore", ] @@ -3594,10 +3594,10 @@ mod tests { " > .git", " > a", " v b", - " > 3 <== selected", + " > 3", " > 4", " > new-dir", - " a-different-filename.tar.gz", + " 
a-different-filename.tar.gz <== selected", " > C", " .dockerignore", ] @@ -3844,8 +3844,8 @@ mod tests { &[ // "v root1", - " one.two.txt <== selected", - " one.txt", + " one.txt <== selected", + " one.two.txt", ] ); @@ -3862,9 +3862,9 @@ mod tests { &[ // "v root1", - " one.two copy.txt <== selected", - " one.two.txt", " one.txt", + " one copy.txt <== selected", + " one.two.txt", ] ); @@ -3878,10 +3878,10 @@ mod tests { &[ // "v root1", - " one.two copy 1.txt <== selected", - " one.two copy.txt", - " one.two.txt", " one.txt", + " one copy.txt", + " one copy 1.txt <== selected", + " one.two.txt", ] ); } @@ -4074,8 +4074,8 @@ mod tests { " > b", " four.txt", " one.txt", - " three copy.txt <== selected", " three.txt", + " three copy.txt <== selected", " two.txt", ] ); @@ -4105,8 +4105,8 @@ mod tests { " > b", " four.txt", " one.txt", - " three copy.txt", " three.txt", + " three copy.txt", " two.txt", ] ); diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 3143cb49e3e7cd..cd5beedf47b2cb 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -9,9 +9,8 @@ use std::{ use globset::{Glob, GlobSet, GlobSetBuilder}; use regex::Regex; use serde::{Deserialize, Serialize}; -use unicase::UniCase; -use crate::{maybe, NumericPrefixWithSuffix}; +use crate::NumericPrefixWithSuffix; /// Returns the path to the user's home directory. 
pub fn home_dir() -> &'static PathBuf { @@ -282,34 +281,29 @@ pub fn compare_paths( let a_is_file = components_a.peek().is_none() && a_is_file; let b_is_file = components_b.peek().is_none() && b_is_file; let ordering = a_is_file.cmp(&b_is_file).then_with(|| { - let maybe_numeric_ordering = maybe!({ - let path_a = Path::new(component_a.as_os_str()); - let num_and_remainder_a = if a_is_file { + let path_a = Path::new(component_a.as_os_str()); + let num_and_remainder_a = NumericPrefixWithSuffix::from_numeric_prefixed_str( + if a_is_file { path_a.file_stem() } else { path_a.file_name() } .and_then(|s| s.to_str()) - .and_then(NumericPrefixWithSuffix::from_numeric_prefixed_str)?; + .unwrap_or_default(), + ); - let path_b = Path::new(component_b.as_os_str()); - let num_and_remainder_b = if b_is_file { + let path_b = Path::new(component_b.as_os_str()); + let num_and_remainder_b = NumericPrefixWithSuffix::from_numeric_prefixed_str( + if b_is_file { path_b.file_stem() } else { path_b.file_name() } .and_then(|s| s.to_str()) - .and_then(NumericPrefixWithSuffix::from_numeric_prefixed_str)?; + .unwrap_or_default(), + ); - num_and_remainder_a.partial_cmp(&num_and_remainder_b) - }); - - maybe_numeric_ordering.unwrap_or_else(|| { - let name_a = UniCase::new(component_a.as_os_str().to_string_lossy()); - let name_b = UniCase::new(component_b.as_os_str().to_string_lossy()); - - name_a.cmp(&name_b) - }) + num_and_remainder_a.cmp(&num_and_remainder_b) }); if !ordering.is_eq() { return ordering; @@ -350,6 +344,18 @@ mod tests { (Path::new("test_dirs/1.46/bar_2"), true), ] ); + let mut paths = vec![ + (Path::new("root1/one.txt"), true), + (Path::new("root1/one.two.txt"), true), + ]; + paths.sort_by(|&a, &b| compare_paths(a, b)); + assert_eq!( + paths, + vec![ + (Path::new("root1/one.txt"), true), + (Path::new("root1/one.two.txt"), true), + ] + ); } #[test] diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index 25ef3630352ba0..40a5cf6212e443 100644 --- 
a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -644,27 +644,27 @@ impl RangeExt for RangeInclusive { /// This is useful for turning regular alphanumerically sorted sequences as `1-abc, 10, 11-def, .., 2, 21-abc` /// into `1-abc, 2, 10, 11-def, .., 21-abc` #[derive(Debug, PartialEq, Eq)] -pub struct NumericPrefixWithSuffix<'a>(i32, &'a str); +pub struct NumericPrefixWithSuffix<'a>(Option, &'a str); impl<'a> NumericPrefixWithSuffix<'a> { - pub fn from_numeric_prefixed_str(str: &'a str) -> Option { + pub fn from_numeric_prefixed_str(str: &'a str) -> Self { let i = str.chars().take_while(|c| c.is_ascii_digit()).count(); let (prefix, remainder) = str.split_at(i); - match prefix.parse::() { - Ok(prefix) => Some(NumericPrefixWithSuffix(prefix, remainder)), - Err(_) => None, - } + let prefix = prefix.parse().ok(); + Self(prefix, remainder) } } - impl Ord for NumericPrefixWithSuffix<'_> { fn cmp(&self, other: &Self) -> Ordering { - let NumericPrefixWithSuffix(num_a, remainder_a) = self; - let NumericPrefixWithSuffix(num_b, remainder_b) = other; - num_a - .cmp(num_b) - .then_with(|| UniCase::new(remainder_a).cmp(&UniCase::new(remainder_b))) + match (self.0, other.0) { + (None, None) => UniCase::new(self.1).cmp(&UniCase::new(other.1)), + (None, Some(_)) => Ordering::Greater, + (Some(_), None) => Ordering::Less, + (Some(a), Some(b)) => a + .cmp(&b) + .then_with(|| UniCase::new(self.1).cmp(&UniCase::new(other.1))), + } } } @@ -737,66 +737,62 @@ mod tests { let target = "1a"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(1, "a")) + NumericPrefixWithSuffix(Some(1), "a") ); let target = "12ab"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(12, "ab")) + NumericPrefixWithSuffix(Some(12), "ab") ); let target = "12_ab"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(12, "_ab")) + 
NumericPrefixWithSuffix(Some(12), "_ab") ); let target = "1_2ab"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(1, "_2ab")) + NumericPrefixWithSuffix(Some(1), "_2ab") ); let target = "1.2"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(1, ".2")) + NumericPrefixWithSuffix(Some(1), ".2") ); let target = "1.2_a"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(1, ".2_a")) + NumericPrefixWithSuffix(Some(1), ".2_a") ); let target = "12.2_a"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(12, ".2_a")) + NumericPrefixWithSuffix(Some(12), ".2_a") ); let target = "12a.2_a"; assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(target), - Some(NumericPrefixWithSuffix(12, "a.2_a")) + NumericPrefixWithSuffix(Some(12), "a.2_a") ); } #[test] fn test_numeric_prefix_with_suffix() { let mut sorted = vec!["1-abc", "10", "11def", "2", "21-abc"]; - sorted.sort_by_key(|s| { - NumericPrefixWithSuffix::from_numeric_prefixed_str(s).unwrap_or_else(|| { - panic!("Cannot convert string `{s}` into NumericPrefixWithSuffix") - }) - }); + sorted.sort_by_key(|s| NumericPrefixWithSuffix::from_numeric_prefixed_str(s)); assert_eq!(sorted, ["1-abc", "2", "10", "11def", "21-abc"]); for numeric_prefix_less in ["numeric_prefix_less", "aaa", "~™£"] { assert_eq!( NumericPrefixWithSuffix::from_numeric_prefixed_str(numeric_prefix_less), - None, + NumericPrefixWithSuffix(None, numeric_prefix_less), "String without numeric prefix `{numeric_prefix_less}` should not be converted into NumericPrefixWithSuffix" ) } From 75256bdfe1b054688d79a0b6fb4900888ea16279 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 14:41:02 +0200 Subject: [PATCH 012/762] lsp: Add support for workspace/workspaceFolders request (#17639) Related 
to: #17574 Release Notes: - N/A --- crates/project/src/lsp_store.rs | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 5b4dbd1deaff92..1d9ca98c064720 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -40,7 +40,7 @@ use lsp::{ CompletionContext, DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, FileSystemWatcher, InsertTextFormat, LanguageServer, LanguageServerBinary, LanguageServerId, LspRequestFuture, MessageActionItem, MessageType, OneOf, ServerHealthStatus, - ServerStatus, SymbolKind, TextEdit, WorkDoneProgressCancelParams, + ServerStatus, SymbolKind, TextEdit, Url, WorkDoneProgressCancelParams, WorkspaceFolder, }; use parking_lot::{Mutex, RwLock}; use postage::watch; @@ -4774,6 +4774,30 @@ impl LspStore { }) .detach(); + let id = language_server.server_id(); + language_server + .on_request::({ + let this = this.clone(); + move |_, mut cx| { + let this = this.clone(); + async move { + let Some(server) = + this.update(&mut cx, |this, _| this.language_server_for_id(id))? + else { + return Ok(None); + }; + let root = server.root_path(); + let Ok(uri) = Url::from_file_path(&root) else { + return Ok(None); + }; + Ok(Some(vec![WorkspaceFolder { + uri, + name: Default::default(), + }])) + } + } + }) + .detach(); // Even though we don't have handling for these requests, respond to them to // avoid stalling any language server like `gopls` which waits for a response // to these requests when initializing. From 929eff815cd7a03811536375b71de110ae53db60 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 14:50:23 +0200 Subject: [PATCH 013/762] project panel: Get rid of unwrap in autofolding code (#17641) @WeetHet spotted a crash in recently-introduced project panel autofolding that relates to unwrapping. 
Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 32ccd47a89db2e..56d524cdc7130c 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -115,7 +115,6 @@ struct EntryDetails { is_cut: bool, git_status: Option, is_private: bool, - is_auto_folded: bool, worktree_id: WorktreeId, canonical_path: Option>, } @@ -2099,7 +2098,6 @@ impl ProjectPanel { .map_or(false, |e| e.is_cut() && e.items().contains(&selection)), git_status: status, is_private: entry.is_private, - is_auto_folded: difference > 1, worktree_id: *worktree_id, canonical_path: entry.canonical_path.clone(), }; @@ -2212,7 +2210,6 @@ impl ProjectPanel { active_selection: selection, marked_selections: selections, }; - let is_auto_folded = details.is_auto_folded; div() .id(entry_id.to_proto() as usize) .on_drag_move::(cx.listener( @@ -2314,8 +2311,9 @@ impl ProjectPanel { h_flex().h_6().w_full().child(editor.clone()) } else { h_flex().h_6().map(|this| { - if is_auto_folded && is_active { - let folded_ancestors = self.ancestors.get(&entry_id).unwrap(); + if let Some(folded_ancestors) = + is_active.then(|| self.ancestors.get(&entry_id)).flatten() + { let Some(part_to_highlight) = Path::new(&file_name) .ancestors() .nth(folded_ancestors.current_ancestor_depth) From bf64c0899f076b8a62071ffae22fb9b35af667c0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 10 Sep 2024 10:22:12 -0400 Subject: [PATCH 014/762] go: Fix regression by restoring regex to match tests (#17645) This fixes a regression that snuck in with #17108. When running a single test with `go test` the regex wouldn't be used anymore. This restores the old behavior. Release Notes: - Fixed a regression when running Go tests. 
A recent change dropped the regex used to match single test names when using `go test` in tasks to run tests. That could lead to more or the wrong tests being run. This restores the old behavior. --- crates/languages/src/go.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index c22a4e3eda9220..a528f4f70cdc44 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -518,7 +518,13 @@ impl ContextProvider for GoContextProvider { GO_PACKAGE_TASK_VARIABLE.template_value(), VariableName::Symbol.template_value(), ), - command: format!("go test -run {}", VariableName::Symbol.template_value(),), + command: "go".into(), + args: vec![ + "test".into(), + GO_PACKAGE_TASK_VARIABLE.template_value(), + "-run".into(), + format!("^{}\\$", VariableName::Symbol.template_value(),), + ], tags: vec!["go-test".to_owned()], cwd: package_cwd.clone(), ..TaskTemplate::default() From 93b3520c11cd645d595f6158bc0cc178238105c5 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 10 Sep 2024 11:03:44 -0400 Subject: [PATCH 015/762] assistant: Prevent possible execution of generated terminal commands (#17647) Closes #17424 Release Notes: - Fixed an issue where commands generated by the terminal command could sometimes be executed without confirmation --- crates/assistant/src/terminal_inline_assistant.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 479925b060633e..bb3f9d36bffbb5 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -988,7 +988,7 @@ impl TerminalTransaction { pub fn push(&mut self, hunk: String, cx: &mut AppContext) { // Ensure that the assistant cannot accidentally execute commands that are streamed into the terminal - let input = hunk.replace(CARRIAGE_RETURN, " "); + let input = 
Self::sanitize_input(hunk); self.terminal .update(cx, |terminal, _| terminal.input(input)); } @@ -1003,6 +1003,10 @@ impl TerminalTransaction { terminal.input(CARRIAGE_RETURN.to_string()) }); } + + fn sanitize_input(input: String) -> String { + input.replace(['\r', '\n'], "") + } } pub struct Codegen { From a078cb104c01d59442bc52f192e14807723f1278 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 11:16:27 -0400 Subject: [PATCH 016/762] Disable definition lists in Markdown (#17648) This PR disables definition list support in `pulldown_cmark`, as it is has been causing a number of issues. I opened an issue upstream with the panic we were seeing: https://github.com/pulldown-cmark/pulldown-cmark/issues/957. Release Notes: - N/A --- crates/language/src/markdown.rs | 2 ++ crates/markdown/src/parser.rs | 5 ++++- crates/markdown_preview/src/markdown_parser.rs | 4 +++- crates/rich_text/src/rich_text.rs | 4 +++- 4 files changed, 12 insertions(+), 3 deletions(-) diff --git a/crates/language/src/markdown.rs b/crates/language/src/markdown.rs index 98b9ba53b106dc..b9393a16ab3cee 100644 --- a/crates/language/src/markdown.rs +++ b/crates/language/src/markdown.rs @@ -166,6 +166,7 @@ pub async fn parse_markdown_block( let mut list_stack = Vec::new(); let mut options = pulldown_cmark::Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); options.remove(pulldown_cmark::Options::ENABLE_YAML_STYLE_METADATA_BLOCKS); for event in Parser::new_ext(markdown, options) { @@ -384,6 +385,7 @@ public: void format(const int &, const std::tm &, int &dest) "#; let mut options = pulldown_cmark::Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); options.remove(pulldown_cmark::Options::ENABLE_YAML_STYLE_METADATA_BLOCKS); let parser = pulldown_cmark::Parser::new_ext(input, options); diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index 3dd11be9838a91..7d349e29efce92 100644 --- 
a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -5,10 +5,13 @@ use pulldown_cmark::{Alignment, HeadingLevel, LinkType, MetadataBlockKind, Optio use std::ops::Range; pub fn parse_markdown(text: &str) -> Vec<(Range, MarkdownEvent)> { + let mut options = Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); + let mut events = Vec::new(); let mut within_link = false; let mut within_metadata = false; - for (pulldown_event, mut range) in Parser::new_ext(text, Options::all()).into_offset_iter() { + for (pulldown_event, mut range) in Parser::new_ext(text, options).into_offset_iter() { if within_metadata { if let pulldown_cmark::Event::End(pulldown_cmark::TagEnd::MetadataBlock { .. }) = pulldown_event diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 4a607f4d726117..7e503fb609efa9 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -11,7 +11,9 @@ pub async fn parse_markdown( file_location_directory: Option, language_registry: Option>, ) -> ParsedMarkdown { - let options = Options::all(); + let mut options = Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); + let parser = Parser::new_ext(markdown_input, options); let parser = MarkdownParser::new( parser.into_offset_iter().collect(), diff --git a/crates/rich_text/src/rich_text.rs b/crates/rich_text/src/rich_text.rs index 2c4b2ca8ee3132..80b7786c2422d7 100644 --- a/crates/rich_text/src/rich_text.rs +++ b/crates/rich_text/src/rich_text.rs @@ -195,7 +195,9 @@ pub fn render_markdown_mut( let mut current_language = None; let mut list_stack = Vec::new(); - let options = Options::all(); + let mut options = Options::all(); + options.remove(pulldown_cmark::Options::ENABLE_DEFINITION_LIST); + for (event, source_range) in Parser::new_ext(block, options).into_offset_iter() { let prev_len = text.len(); match event { From 
a7ac37156c98bf3a48c9506b8c65284089b76b72 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 10 Sep 2024 11:23:50 -0400 Subject: [PATCH 017/762] assistant: Fix configuration page showing incorrect Anthropic API key label (#17650) Release Notes: - N/A --- crates/language_model/src/provider/anthropic.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index 37ee2faf405bbf..eac4ad3021ee1d 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -695,7 +695,7 @@ impl Render for ConfigurationView { ) .child( Label::new( - "You can also assign the {ANTHROPIC_API_KEY_VAR} environment variable and restart Zed.", + format!("You can also assign the {ANTHROPIC_API_KEY_VAR} environment variable and restart Zed."), ) .size(LabelSize::Small), ) From d5498c52f8205d8e02e0a489ae3707960476a425 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 10 Sep 2024 11:45:21 -0400 Subject: [PATCH 018/762] assistant: Fix terminal inline assistant not showing retry on error (#17651) Release Notes: - Fixed an issue where a failed inline assistant prompt could not be restarted --- crates/assistant/src/terminal_inline_assistant.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index bb3f9d36bffbb5..61a8813f6c55ed 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -465,7 +465,8 @@ impl EventEmitter for PromptEditor {} impl Render for PromptEditor { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let buttons = match &self.codegen.read(cx).status { + let status = &self.codegen.read(cx).status; + let buttons = match status { CodegenStatus::Idle => { vec![ IconButton::new("cancel", IconName::Close) @@ 
-516,7 +517,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click(cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested))); - if self.edited_since_done { + let has_error = matches!(status, CodegenStatus::Error(_)); + if has_error || self.edited_since_done { vec![ cancel, IconButton::new("restart", IconName::RotateCw) From 5f61e3140f14f0411f4a39dbeb59af57b6810555 Mon Sep 17 00:00:00 2001 From: Eric Andres Date: Tue, 10 Sep 2024 09:49:04 -0600 Subject: [PATCH 019/762] Fix vim surround behavior around text objects (#17603) Performing `ysa")` on `"Hello World"` should produce `("Hello World")`. Instead it places the parens inside the quotes (i.e. `"(Hello World)"`). This PR fixes the behavior by preserving the `around` flag from the operator sequence. Closes #12976 and partially fixes #13841 Release Notes: - Fixed the behavior of surrounding a text object in vim. --- crates/vim/src/normal.rs | 2 +- crates/vim/src/surrounds.rs | 40 ++++++++++++++++++++++++++++++++++--- 2 files changed, 38 insertions(+), 4 deletions(-) diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index ae560acc2999e5..8198c0da53b4f0 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -248,7 +248,7 @@ impl Vim { } Some(Operator::AddSurrounds { target: None }) => { waiting_operator = Some(Operator::AddSurrounds { - target: Some(SurroundsType::Object(object)), + target: Some(SurroundsType::Object(object, around)), }); } Some(Operator::ToggleComments) => self.toggle_comments_object(object, around, cx), diff --git a/crates/vim/src/surrounds.rs b/crates/vim/src/surrounds.rs index 137801c3ee81d1..81025103fb7ca4 100644 --- a/crates/vim/src/surrounds.rs +++ b/crates/vim/src/surrounds.rs @@ -13,7 +13,7 @@ use ui::ViewContext; #[derive(Clone, Debug, PartialEq, Eq)] pub enum SurroundsType { Motion(Motion), - Object(Object), + Object(Object, bool), Selection, } @@ -59,8 +59,8 @@ impl Vim { for selection 
in &display_selections { let range = match &target { - SurroundsType::Object(object) => { - object.range(&display_map, selection.clone(), false) + SurroundsType::Object(object, around) => { + object.range(&display_map, selection.clone(), *around) } SurroundsType::Motion(motion) => { motion @@ -697,6 +697,40 @@ mod test { the lazy dog."}, Mode::Normal, ); + + // test add surrounds around object + cx.set_state( + indoc! {" + The [quˇick] brown + fox jumps over + the lazy dog."}, + Mode::Normal, + ); + cx.simulate_keystrokes("y s a ] )"); + cx.assert_state( + indoc! {" + The ˇ([quick]) brown + fox jumps over + the lazy dog."}, + Mode::Normal, + ); + + // test add surrounds inside object + cx.set_state( + indoc! {" + The [quˇick] brown + fox jumps over + the lazy dog."}, + Mode::Normal, + ); + cx.simulate_keystrokes("y s i ] )"); + cx.assert_state( + indoc! {" + The [ˇ(quick)] brown + fox jumps over + the lazy dog."}, + Mode::Normal, + ); } #[gpui::test] From 0b0cd9005e3f40865cd37ce32a4938570755ba16 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 10 Sep 2024 11:58:16 -0400 Subject: [PATCH 020/762] assistant: Fix file slash command not allowing to select multiple files when pressing tab (#17652) Release Notes: - Allow to add multiple files in a single `/file` command when pressing tab --- crates/assistant/src/slash_command/file_command.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index c253e5b91c0297..e5d8f1b2d6bbb2 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -164,11 +164,7 @@ impl SlashCommand for FileSlashCommand { Some(ArgumentCompletion { label, new_text: text, - after_completion: if path_match.is_dir { - AfterCompletion::Compose - } else { - AfterCompletion::Run - }, + after_completion: AfterCompletion::Compose, replace_previous_arguments: 
false, }) }) From 85f4c96feff44d2070b2b520de9f4620ecc84b02 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 10 Sep 2024 12:40:54 -0400 Subject: [PATCH 021/762] Ubuntu 22 (Linux arm runner) fixes (#17643) Our GitHub Actions Linux ARM hosted runner was running Ubuntu 20 was EOL'd. This gets builds working on the Ubuntu 22 Linux ARM runner which have spun to replace the EOL'd one. It pushes forward our Glibc requirement for Linux ARM users (was >= 2.29, now >= 2.35; sorry!) but also uses a newer version of clang/llvm (was 10, now 15; yay!). --- .github/workflows/ci.yml | 6 +++--- docs/src/linux.md | 16 +++++++++++----- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 37e80e5a8d32ce..02bec287144357 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -339,7 +339,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - bundle-linux-aarch64: + bundle-linux-aarch64: # this runs on ubuntu22.04 timeout-minutes: 60 name: Create arm64 Linux bundle runs-on: @@ -360,8 +360,8 @@ jobs: - name: Set up Clang run: | sudo apt-get update - sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev - echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH + sudo apt-get install -y llvm-15 clang-15 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev + echo "/usr/lib/llvm-15/bin" >> $GITHUB_PATH - uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1 with: diff --git a/docs/src/linux.md b/docs/src/linux.md index 2b9a66d51e67f0..812a3707d01b1b 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -12,15 +12,21 @@ We also offer a preview build of Zed which receives updates about a week ahead o curl -f https://zed.dev/install.sh | 
ZED_CHANNEL=preview sh ``` -The Zed installed by the script does not work on systems that: +The Zed installed by the script works best on systems that: -- have no Vulkan compatible GPU available (for example Linux on an M-series macBook) -- have no system-wide glibc (for example on NixOS or Alpine by default) -- have a glibc older than version 2.29 (for example Amazon Linux 2 or Ubuntu 18 and earlier) -- use an architecture other than 64-bit Intel or 64-bit ARM (for example a 32-bit or RISC-V machine) +- have a Vulkan compatible GPU available (for example Linux on an M-series macBook) +- have a system-wide glibc (NixOS and Alpine do not by default) + - x86_64 (Intel/AMD): glibc version >= 2.29 (Ubuntu 20 and newer; Amazon Linux >2023) + - aarch64 (ARM): glibc version >= 2.35 (Ubuntu 22 and newer) Both Nix and Alpine have third-party Zed packages available (though they are currently a few weeks out of date). If you'd like to use our builds they do work if you install a glibc compatibility layer. On NixOS you can try [nix-ld](https://github.com/Mic92/nix-ld), and on Alpine [gcompat](https://wiki.alpinelinux.org/wiki/Running_glibc_programs). +You will need to build from source for: + +- architectures other than 64-bit Intel or 64-bit ARM (for example a 32-bit or RISC-V machine) +- Amazon Linux 2 on x86_64 +- Rocky Linux 9.3 + ## Other ways to install Zed on Linux Zed is open source, and [you can install from source](./development/linux.md). 
From fb9d01b0d5c23a1f057bac06c86d5dbaa9a7c39d Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 10 Sep 2024 13:41:06 -0400 Subject: [PATCH 022/762] assistant: Add display_name for OpenAI and Gemini (#17508) --- crates/assistant/src/assistant_settings.rs | 2 ++ crates/google_ai/src/google_ai.rs | 11 +++++++++-- crates/language_model/src/provider/cloud.rs | 2 ++ crates/language_model/src/provider/google.rs | 2 ++ crates/language_model/src/provider/open_ai.rs | 8 +++----- crates/language_model/src/settings.rs | 2 ++ crates/open_ai/src/open_ai.rs | 6 +++++- docs/src/assistant/configuration.md | 17 +++++++++-------- 8 files changed, 34 insertions(+), 16 deletions(-) diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index d57c1f19b685c0..3e326886d50d71 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -160,10 +160,12 @@ impl AssistantSettingsContent { .filter_map(|model| match model { OpenAiModel::Custom { name, + display_name, max_tokens, max_output_tokens, } => Some(open_ai::AvailableModel { name, + display_name, max_tokens, max_output_tokens, }), diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index f0803b402947f3..f1dcedf5b31e0c 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -304,7 +304,12 @@ pub enum Model { #[serde(rename = "gemini-1.5-flash")] Gemini15Flash, #[serde(rename = "custom")] - Custom { name: String, max_tokens: usize }, + Custom { + name: String, + /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + display_name: Option, + max_tokens: usize, + }, } impl Model { @@ -320,7 +325,9 @@ impl Model { match self { Model::Gemini15Pro => "Gemini 1.5 Pro", Model::Gemini15Flash => "Gemini 1.5 Flash", - Model::Custom { name, .. } => name, + Self::Custom { + name, display_name, .. 
+ } => display_name.as_ref().unwrap_or(name), } } diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 3db155393d6277..0de7fb3feb49e2 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -254,11 +254,13 @@ impl LanguageModelProvider for CloudLanguageModelProvider { }), AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom { name: model.name.clone(), + display_name: model.display_name.clone(), max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, }), AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom { name: model.name.clone(), + display_name: model.display_name.clone(), max_tokens: model.max_tokens, }), }; diff --git a/crates/language_model/src/provider/google.rs b/crates/language_model/src/provider/google.rs index 1b24e8eda9864f..fc4a7a7a3405a0 100644 --- a/crates/language_model/src/provider/google.rs +++ b/crates/language_model/src/provider/google.rs @@ -37,6 +37,7 @@ pub struct GoogleSettings { #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] pub struct AvailableModel { name: String, + display_name: Option, max_tokens: usize, } @@ -170,6 +171,7 @@ impl LanguageModelProvider for GoogleLanguageModelProvider { model.name.clone(), google_ai::Model::Custom { name: model.name.clone(), + display_name: model.display_name.clone(), max_tokens: model.max_tokens, }, ); diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 6b1790c1a17bae..15d84f6cca2efc 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -40,6 +40,7 @@ pub struct OpenAiSettings { #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] pub struct AvailableModel { pub name: String, + pub display_name: Option, pub max_tokens: usize, pub max_output_tokens: Option, } @@ -171,6 +172,7 @@ 
impl LanguageModelProvider for OpenAiLanguageModelProvider { model.name.clone(), open_ai::Model::Custom { name: model.name.clone(), + display_name: model.display_name.clone(), max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, }, @@ -368,11 +370,7 @@ pub fn count_open_ai_tokens( }) .collect::>(); - if let open_ai::Model::Custom { .. } = model { - tiktoken_rs::num_tokens_from_messages("gpt-4", &messages) - } else { - tiktoken_rs::num_tokens_from_messages(model.id(), &messages) - } + tiktoken_rs::num_tokens_from_messages(model.id(), &messages) }) .boxed() } diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 8d3838d2369c62..0059ed56c4c63b 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -175,12 +175,14 @@ impl OpenAiSettingsContent { .filter_map(|model| match model { open_ai::Model::Custom { name, + display_name, max_tokens, max_output_tokens, } => Some(provider::open_ai::AvailableModel { name, max_tokens, max_output_tokens, + display_name, }), _ => None, }) diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 6be5327c04f630..5b621d6bb844c3 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -68,6 +68,8 @@ pub enum Model { #[serde(rename = "custom")] Custom { name: String, + /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + display_name: Option, max_tokens: usize, max_output_tokens: Option, }, @@ -103,7 +105,9 @@ impl Model { Self::FourTurbo => "gpt-4-turbo", Self::FourOmni => "gpt-4o", Self::FourOmniMini => "gpt-4o-mini", - Self::Custom { name, .. } => name, + Self::Custom { + name, display_name, .. 
+ } => display_name.as_ref().unwrap_or(name), } } diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index eaf5ed13b4c246..0fd242c6191b4d 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -77,7 +77,7 @@ You can use Gemini 1.5 Pro/Flash with the Zed assistant by choosing it via the m 1. Go the Google AI Studio site and [create an API key](https://aistudio.google.com/app/apikey). 2. Open the configuration view (`assistant: show configuration`) and navigate to the Google AI section -3. Enter your Google AI API key +3. Enter your Google AI API key and press enter. The Google AI API key will be saved in your keychain. @@ -85,7 +85,7 @@ Zed will also use the `GOOGLE_AI_API_KEY` environment variable if it's defined. #### Google AI custom models {#google-ai-custom-models} -You can add custom models to the Google AI provider by adding the following to your Zed `settings.json`: +By default Zed will use `stable` versions of models, but you can use specific versions of models, including [experimental models](https://ai.google.dev/gemini-api/docs/models/experimental-models) with the Google AI provider by adding the following to your Zed `settings.json`: ```json { @@ -93,8 +93,9 @@ You can add custom models to the Google AI provider by adding the following to y "google": { "available_models": [ { - "name": "custom-model", - "max_tokens": 128000 + "name": "gemini-1.5-flash-latest", + "display_name": "Gemini 1.5 Flash (Latest)", + "max_tokens": 1000000 } ] } @@ -164,16 +165,16 @@ Zed will also use the `OPENAI_API_KEY` environment variable if it's defined. #### OpenAI Custom Models {#openai-custom-models} -You can add custom models to the OpenAI provider, by adding the following to your Zed `settings.json`: +The Zed Assistant comes pre-configured to use the latest version for common models (GPT-3.5 Turbo, GPT-4, GPT-4 Turbo, GPT-4o, GPT-4o mini). 
If you wish to use alternate models, perhaps a preview release or a dated model release, you can do so by adding the following to your Zed `settings.json`: ```json { "language_models": { "openai": { - "version": "1", "available_models": [ { - "name": "custom-model", + "provider": "openai", + "name": "gpt-4o-2024-08-06", "max_tokens": 128000 } ] @@ -182,7 +183,7 @@ You can add custom models to the OpenAI provider, by adding the following to you } ``` -Custom models will be listed in the model dropdown in the assistant panel. +You must provide the model's Context Window in the `max_tokens` parameter, this can be found [OpenAI Model Docs](https://platform.openai.com/docs/models). Custom models will be listed in the model dropdown in the assistant panel. ### Advanced configuration {#advanced-configuration} From 06142f975b13f78455c31fae54bced9b82f308a3 Mon Sep 17 00:00:00 2001 From: Niklas Haas Date: Tue, 10 Sep 2024 20:26:48 +0200 Subject: [PATCH 023/762] Use the configured UI font size for the inline assistant (#17542) --- crates/assistant/src/inline_assistant.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index bfd85d2525aaae..7bd74ccabf4763 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1921,7 +1921,7 @@ impl PromptEditor { font_family: settings.ui_font.family.clone(), font_features: settings.ui_font.features.clone(), font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: rems(0.875).into(), + font_size: settings.ui_font_size.into(), font_weight: settings.ui_font.weight, line_height: relative(1.3), ..Default::default() From ae3880e71a86ef1ddc6218d4a87d683a95672c68 Mon Sep 17 00:00:00 2001 From: KorigamiK <72932688+KorigamiK@users.noreply.github.com> Date: Wed, 11 Sep 2024 00:06:36 +0530 Subject: [PATCH 024/762] Add ability to open files with system default application (#17231) --- 
assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 2 +- assets/keymaps/vim.json | 1 + crates/gpui/src/app.rs | 5 +++++ crates/gpui/src/platform.rs | 1 + crates/gpui/src/platform/linux/platform.rs | 13 +++++++++++++ crates/gpui/src/platform/mac/platform.rs | 14 ++++++++++++++ crates/gpui/src/platform/test/platform.rs | 4 ++++ crates/gpui/src/platform/windows/platform.rs | 13 +++++++++++++ crates/project_panel/src/project_panel.rs | 10 ++++++++++ 10 files changed, 63 insertions(+), 1 deletion(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index bd2ade4246b17c..3c627d7803e1d5 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -553,6 +553,7 @@ "ctrl-backspace": ["project_panel::Delete", { "skip_prompt": false }], "ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }], "alt-ctrl-r": "project_panel::RevealInFileManager", + "ctrl-shift-enter": "project_panel::OpenWithSystem", "alt-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrev", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index dec5cbd9f398b9..ed6ece0556e038 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -563,8 +563,8 @@ "cmd-backspace": ["project_panel::Trash", { "skip_prompt": true }], "cmd-delete": ["project_panel::Delete", { "skip_prompt": false }], "alt-cmd-r": "project_panel::RevealInFileManager", + "ctrl-shift-enter": "project_panel::OpenWithSystem", "cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }], - "alt-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrev", diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index f863e8488a0e28..54905b22678cd1 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -493,6 +493,7 @@ "v": 
"project_panel::OpenPermanent", "p": "project_panel::Open", "x": "project_panel::RevealInFileManager", + "s": "project_panel::OpenWithSystem", "shift-g": "menu::SelectLast", "g g": "menu::SelectFirst", "-": "project_panel::SelectParent", diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 2157f97634daaf..564b8934897734 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -657,6 +657,11 @@ impl AppContext { self.platform.reveal_path(path) } + /// Opens the specified path with the system's default application. + pub fn open_with_system(&self, path: &Path) { + self.platform.open_with_system(path) + } + /// Returns whether the user has configured scrollbars to auto-hide at the platform level. pub fn should_auto_hide_scrollbars(&self) -> bool { self.platform.should_auto_hide_scrollbars() diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index cb54d9d47a0873..680c813078b86d 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -149,6 +149,7 @@ pub(crate) trait Platform: 'static { ) -> oneshot::Receiver>>>; fn prompt_for_new_path(&self, directory: &Path) -> oneshot::Receiver>>; fn reveal_path(&self, path: &Path); + fn open_with_system(&self, path: &Path); fn on_quit(&self, callback: Box); fn on_reopen(&self, callback: Box); diff --git a/crates/gpui/src/platform/linux/platform.rs b/crates/gpui/src/platform/linux/platform.rs index 0aa17e534ab760..a0bd6b1d33d176 100644 --- a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -351,6 +351,19 @@ impl Platform for P { self.reveal_path(path.to_owned()); } + fn open_with_system(&self, path: &Path) { + let executor = self.background_executor().clone(); + let path = path.to_owned(); + executor + .spawn(async move { + let _ = std::process::Command::new("xdg-open") + .arg(path) + .spawn() + .expect("Failed to open file with xdg-open"); + }) + .detach(); + } + fn on_quit(&self, callback: Box) { 
self.with_common(|common| { common.callbacks.quit = Some(callback); diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index d03d8f0571f9b1..5873d8fe396d71 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -718,6 +718,20 @@ impl Platform for MacPlatform { } } + fn open_with_system(&self, path: &Path) { + let path = path.to_path_buf(); + self.0 + .lock() + .background_executor + .spawn(async move { + std::process::Command::new("open") + .arg(path) + .spawn() + .expect("Failed to open file"); + }) + .detach(); + } + fn on_quit(&self, callback: Box) { self.0.lock().quit = Some(callback); } diff --git a/crates/gpui/src/platform/test/platform.rs b/crates/gpui/src/platform/test/platform.rs index 58ca694d89ee02..3258ae9af59ab2 100644 --- a/crates/gpui/src/platform/test/platform.rs +++ b/crates/gpui/src/platform/test/platform.rs @@ -318,6 +318,10 @@ impl Platform for TestPlatform { fn register_url_scheme(&self, _: &str) -> Task> { unimplemented!() } + + fn open_with_system(&self, _path: &Path) { + unimplemented!() + } } #[cfg(target_os = "windows")] diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 2dcaf72ef25162..f8b3924e6282b1 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -400,6 +400,19 @@ impl Platform for WindowsPlatform { .detach(); } + fn open_with_system(&self, path: &Path) { + let executor = self.background_executor().clone(); + let path = path.to_owned(); + executor + .spawn(async move { + let _ = std::process::Command::new("cmd") + .args(&["/c", "start", "", path.to_str().expect("path to string")]) + .spawn() + .expect("Failed to open file"); + }) + .detach(); + } + fn on_quit(&self, callback: Box) { self.state.borrow_mut().callbacks.quit = Some(callback); } diff --git a/crates/project_panel/src/project_panel.rs 
b/crates/project_panel/src/project_panel.rs index 56d524cdc7130c..c77a2170dd01cd 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -146,6 +146,7 @@ actions!( CopyRelativePath, Duplicate, RevealInFileManager, + OpenWithSystem, Cut, Paste, Rename, @@ -500,6 +501,7 @@ impl ProjectPanel { .when(cfg!(not(target_os = "macos")), |menu| { menu.action("Reveal in File Manager", Box::new(RevealInFileManager)) }) + .action("Open in Default App", Box::new(OpenWithSystem)) .action("Open in Terminal", Box::new(OpenInTerminal)) .when(is_dir, |menu| { menu.separator() @@ -1497,6 +1499,13 @@ impl ProjectPanel { } } + fn open_system(&mut self, _: &OpenWithSystem, cx: &mut ViewContext) { + if let Some((worktree, entry)) = self.selected_entry(cx) { + let abs_path = worktree.abs_path().join(&entry.path); + cx.open_with_system(&abs_path); + } + } + fn open_in_terminal(&mut self, _: &OpenInTerminal, cx: &mut ViewContext) { if let Some((worktree, entry)) = self.selected_sub_entry(cx) { let abs_path = worktree.abs_path().join(&entry.path); @@ -2711,6 +2720,7 @@ impl Render for ProjectPanel { }) .when(project.is_local_or_ssh(), |el| { el.on_action(cx.listener(Self::reveal_in_finder)) + .on_action(cx.listener(Self::open_system)) .on_action(cx.listener(Self::open_in_terminal)) }) .on_mouse_down( From 1b627925d3874b8461212945e8b41a28c5022aad Mon Sep 17 00:00:00 2001 From: Joseph T Lyons Date: Tue, 10 Sep 2024 14:40:51 -0400 Subject: [PATCH 025/762] v0.154.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4f34f055ad6692..f9d3240d682a93 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14197,7 +14197,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.153.0" +version = "0.154.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index de8bc1f7678378..ad02d4f38879ef 100644 --- 
a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.153.0" +version = "0.154.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From a23e381096c623951212608119fc497101e281f1 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 15:25:57 -0400 Subject: [PATCH 026/762] assistant: Pass up tool results in LLM request messages (#17656) This PR makes it so we pass up the tool results in the `tool_results` field in the request message to the LLM. This required reworking how we track non-text content in the context editor. We also removed serialization of images in context history, as we were never deserializing it, and thus it was unneeded. Release Notes: - N/A --------- Co-authored-by: Antonio --- crates/assistant/src/assistant_panel.rs | 39 ++- crates/assistant/src/context.rs | 381 +++++++++++------------- crates/paths/src/paths.rs | 6 - 3 files changed, 215 insertions(+), 211 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 82888b498a3c80..22843d41cd6207 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -11,7 +11,7 @@ use crate::{ }, slash_command_picker, terminal_inline_assistant::TerminalInlineAssistant, - Assist, CacheStatus, ConfirmCommand, Context, ContextEvent, ContextId, ContextStore, + Assist, CacheStatus, ConfirmCommand, Content, Context, ContextEvent, ContextId, ContextStore, ContextStoreEvent, CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssistId, InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, PendingSlashCommand, @@ -46,6 +46,7 @@ use indexed_docs::IndexedDocsStore; use language::{ language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, 
ToOffset, }; +use language_model::LanguageModelToolUse; use language_model::{ provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, Role, @@ -1995,6 +1996,20 @@ impl ContextEditor { let buffer_row = MultiBufferRow(start.to_point(&buffer).row); buffer_rows_to_fold.insert(buffer_row); + self.context.update(cx, |context, cx| { + context.insert_content( + Content::ToolUse { + range: tool_use.source_range.clone(), + tool_use: LanguageModelToolUse { + id: tool_use.id.to_string(), + name: tool_use.name.clone(), + input: tool_use.input.clone(), + }, + }, + cx, + ); + }); + Crease::new( start..end, placeholder, @@ -3538,7 +3553,7 @@ impl ContextEditor { let image_id = image.id(); context.insert_image(image, cx); for image_position in image_positions.iter() { - context.insert_image_anchor(image_id, image_position.text_anchor, cx); + context.insert_image_content(image_id, image_position.text_anchor, cx); } } }); @@ -3553,11 +3568,23 @@ impl ContextEditor { let new_blocks = self .context .read(cx) - .images(cx) - .filter_map(|image| { + .contents(cx) + .filter_map(|content| { + if let Content::Image { + anchor, + render_image, + .. 
+ } = content + { + Some((anchor, render_image)) + } else { + None + } + }) + .filter_map(|(anchor, render_image)| { const MAX_HEIGHT_IN_LINES: u32 = 8; - let anchor = buffer.anchor_in_excerpt(excerpt_id, image.anchor).unwrap(); - let image = image.render_image.clone(); + let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap(); + let image = render_image.clone(); anchor.is_valid(&buffer).then(|| BlockProperties { position: anchor, height: MAX_HEIGHT_IN_LINES, diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 1bf846369b7627..e43ec203e96752 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -17,7 +17,6 @@ use feature_flags::{FeatureFlag, FeatureFlagAppExt}; use fs::{Fs, RemoveOptions}; use futures::{ future::{self, Shared}, - stream::FuturesUnordered, FutureExt, StreamExt, }; use gpui::{ @@ -29,10 +28,11 @@ use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, P use language_model::{ LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent, LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelRequestTool, MessageContent, Role, StopReason, + LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role, + StopReason, }; use open_ai::Model as OpenAiModel; -use paths::{context_images_dir, contexts_dir}; +use paths::contexts_dir; use project::Project; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; @@ -377,23 +377,8 @@ impl MessageMetadata { } } -#[derive(Clone, Debug)] -pub struct MessageImage { - image_id: u64, - image: Shared>>, -} - -impl PartialEq for MessageImage { - fn eq(&self, other: &Self) -> bool { - self.image_id == other.image_id - } -} - -impl Eq for MessageImage {} - #[derive(Clone, Debug)] pub struct Message { - pub image_offsets: SmallVec<[(usize, MessageImage); 1]>, pub offset_range: Range, pub index_range: Range, pub 
anchor_range: Range, @@ -403,62 +388,45 @@ pub struct Message { pub cache: Option, } -impl Message { - fn to_request_message(&self, buffer: &Buffer) -> Option { - let mut content = Vec::new(); - - let mut range_start = self.offset_range.start; - for (image_offset, message_image) in self.image_offsets.iter() { - if *image_offset != range_start { - if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) { - content.push(text); - } - } - - if let Some(image) = message_image.image.clone().now_or_never().flatten() { - content.push(language_model::MessageContent::Image(image)); - } - - range_start = *image_offset; - } - - if range_start != self.offset_range.end { - if let Some(text) = - Self::collect_text_content(buffer, range_start..self.offset_range.end) - { - content.push(text); - } - } +#[derive(Debug, Clone)] +pub enum Content { + Image { + anchor: language::Anchor, + image_id: u64, + render_image: Arc, + image: Shared>>, + }, + ToolUse { + range: Range, + tool_use: LanguageModelToolUse, + }, + ToolResult { + range: Range, + tool_use_id: Arc, + }, +} - if content.is_empty() { - return None; +impl Content { + fn range(&self) -> Range { + match self { + Self::Image { anchor, .. } => *anchor..*anchor, + Self::ToolUse { range, .. } | Self::ToolResult { range, .. 
} => range.clone(), } - - Some(LanguageModelRequestMessage { - role: self.role, - content, - cache: self.cache.as_ref().map_or(false, |cache| cache.is_anchor), - }) } - fn collect_text_content(buffer: &Buffer, range: Range) -> Option { - let text: String = buffer.text_for_range(range.clone()).collect(); - if text.trim().is_empty() { - None + fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering { + let self_range = self.range(); + let other_range = other.range(); + if self_range.end.cmp(&other_range.start, buffer).is_lt() { + Ordering::Less + } else if self_range.start.cmp(&other_range.end, buffer).is_gt() { + Ordering::Greater } else { - Some(MessageContent::Text(text)) + Ordering::Equal } } } -#[derive(Clone, Debug)] -pub struct ImageAnchor { - pub anchor: language::Anchor, - pub image_id: u64, - pub render_image: Arc, - pub image: Shared>>, -} - struct PendingCompletion { id: usize, assistant_message_id: MessageId, @@ -501,7 +469,7 @@ pub struct Context { pending_tool_uses_by_id: HashMap, PendingToolUse>, message_anchors: Vec, images: HashMap, Shared>>)>, - image_anchors: Vec, + contents: Vec, messages_metadata: HashMap, summary: Option, pending_summary: Task>, @@ -595,7 +563,7 @@ impl Context { pending_ops: Vec::new(), operations: Vec::new(), message_anchors: Default::default(), - image_anchors: Default::default(), + contents: Default::default(), images: Default::default(), messages_metadata: Default::default(), pending_slash_commands: Vec::new(), @@ -659,11 +627,6 @@ impl Context { id: message.id, start: message.offset_range.start, metadata: self.messages_metadata[&message.id].clone(), - image_offsets: message - .image_offsets - .iter() - .map(|image_offset| (image_offset.0, image_offset.1.image_id)) - .collect(), }) .collect(), summary: self @@ -1957,6 +1920,14 @@ impl Context { output_range }); + this.insert_content( + Content::ToolResult { + range: anchor_range.clone(), + tool_use_id: tool_use_id.clone(), + }, + cx, + ); + 
cx.emit(ContextEvent::ToolFinished { tool_use_id, output_range: anchor_range, @@ -2038,6 +2009,7 @@ impl Context { let stream_completion = async { let request_start = Instant::now(); let mut events = stream.await?; + let mut stop_reason = StopReason::EndTurn; while let Some(event) = events.next().await { if response_latency.is_none() { @@ -2050,7 +2022,7 @@ impl Context { .message_anchors .iter() .position(|message| message.id == assistant_message_id)?; - let event_to_emit = this.buffer.update(cx, |buffer, cx| { + this.buffer.update(cx, |buffer, cx| { let message_old_end_offset = this.message_anchors[message_ix + 1..] .iter() .find(|message| message.start.is_valid(buffer)) @@ -2059,13 +2031,9 @@ impl Context { }); match event { - LanguageModelCompletionEvent::Stop(reason) => match reason { - StopReason::ToolUse => { - return Some(ContextEvent::UsePendingTools); - } - StopReason::EndTurn => {} - StopReason::MaxTokens => {} - }, + LanguageModelCompletionEvent::Stop(reason) => { + stop_reason = reason; + } LanguageModelCompletionEvent::Text(chunk) => { buffer.edit( [( @@ -2116,14 +2084,9 @@ impl Context { ); } } - - None }); cx.emit(ContextEvent::StreamedCompletion); - if let Some(event) = event_to_emit { - cx.emit(event); - } Some(()) })?; @@ -2136,13 +2099,14 @@ impl Context { this.update_cache_status_for_completion(cx); })?; - anyhow::Ok(()) + anyhow::Ok(stop_reason) }; let result = stream_completion.await; this.update(&mut cx, |this, cx| { let error_message = result + .as_ref() .err() .map(|error| error.to_string().trim().to_string()); @@ -2170,6 +2134,16 @@ impl Context { error_message, ); } + + if let Ok(stop_reason) = result { + match stop_reason { + StopReason::ToolUse => { + cx.emit(ContextEvent::UsePendingTools); + } + StopReason::EndTurn => {} + StopReason::MaxTokens => {} + } + } }) .ok(); } @@ -2186,18 +2160,94 @@ impl Context { pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest { let buffer = self.buffer.read(cx); - let 
request_messages = self - .messages(cx) - .filter(|message| message.status == MessageStatus::Done) - .filter_map(|message| message.to_request_message(&buffer)) - .collect(); - LanguageModelRequest { - messages: request_messages, + let mut contents = self.contents(cx).peekable(); + + fn collect_text_content(buffer: &Buffer, range: Range) -> Option { + let text: String = buffer.text_for_range(range.clone()).collect(); + if text.trim().is_empty() { + None + } else { + Some(text) + } + } + + let mut completion_request = LanguageModelRequest { + messages: Vec::new(), tools: Vec::new(), stop: Vec::new(), temperature: 1.0, + }; + for message in self.messages(cx) { + if message.status != MessageStatus::Done { + continue; + } + + let mut offset = message.offset_range.start; + let mut request_message = LanguageModelRequestMessage { + role: message.role, + content: Vec::new(), + cache: message + .cache + .as_ref() + .map_or(false, |cache| cache.is_anchor), + }; + + while let Some(content) = contents.peek() { + if content + .range() + .end + .cmp(&message.anchor_range.end, buffer) + .is_lt() + { + let content = contents.next().unwrap(); + let range = content.range().to_offset(buffer); + request_message.content.extend( + collect_text_content(buffer, offset..range.start).map(MessageContent::Text), + ); + + match content { + Content::Image { image, .. } => { + if let Some(image) = image.clone().now_or_never().flatten() { + request_message + .content + .push(language_model::MessageContent::Image(image)); + } + } + Content::ToolUse { tool_use, .. } => { + request_message + .content + .push(language_model::MessageContent::ToolUse(tool_use.clone())); + } + Content::ToolResult { tool_use_id, .. 
} => { + request_message.content.push( + language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_use_id.to_string(), + is_error: false, + content: collect_text_content(buffer, range.clone()) + .unwrap_or_default(), + }, + ), + ); + } + } + + offset = range.end; + } else { + break; + } + } + + request_message.content.extend( + collect_text_content(buffer, offset..message.offset_range.end) + .map(MessageContent::Text), + ); + + completion_request.messages.push(request_message); } + + completion_request } pub fn cancel_last_assist(&mut self, cx: &mut ModelContext) -> bool { @@ -2335,42 +2385,50 @@ impl Context { Some(()) } - pub fn insert_image_anchor( + pub fn insert_image_content( &mut self, image_id: u64, anchor: language::Anchor, cx: &mut ModelContext, - ) -> bool { - cx.emit(ContextEvent::MessagesEdited); - - let buffer = self.buffer.read(cx); - let insertion_ix = match self - .image_anchors - .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer)) - { - Ok(ix) => ix, - Err(ix) => ix, - }; - + ) { if let Some((render_image, image)) = self.images.get(&image_id) { - self.image_anchors.insert( - insertion_ix, - ImageAnchor { + self.insert_content( + Content::Image { anchor, image_id, image: image.clone(), render_image: render_image.clone(), }, + cx, ); - - true - } else { - false } } - pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator { - self.image_anchors.iter().cloned() + pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext) { + let buffer = self.buffer.read(cx); + let insertion_ix = match self + .contents + .binary_search_by(|probe| probe.cmp(&content, buffer)) + { + Ok(ix) => { + self.contents.remove(ix); + ix + } + Err(ix) => ix, + }; + self.contents.insert(insertion_ix, content); + cx.emit(ContextEvent::MessagesEdited); + } + + pub fn contents<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator { + let buffer = self.buffer.read(cx); + self.contents + 
.iter() + .filter(|content| { + let range = content.range(); + range.start.is_valid(buffer) && range.end.is_valid(buffer) + }) + .cloned() } pub fn split_message( @@ -2533,22 +2591,14 @@ impl Context { return; } - let messages = self - .messages(cx) - .filter_map(|message| message.to_request_message(self.buffer.read(cx))) - .chain(Some(LanguageModelRequestMessage { - role: Role::User, - content: vec![ - "Summarize the context into a short title without punctuation.".into(), - ], - cache: false, - })); - let request = LanguageModelRequest { - messages: messages.collect(), - tools: Vec::new(), - stop: Vec::new(), - temperature: 1.0, - }; + let mut request = self.to_completion_request(cx); + request.messages.push(LanguageModelRequestMessage { + role: Role::User, + content: vec![ + "Summarize the context into a short title without punctuation.".into(), + ], + cache: false, + }); self.pending_summary = cx.spawn(|this, mut cx| { async move { @@ -2648,10 +2698,8 @@ impl Context { cx: &'a AppContext, ) -> impl 'a + Iterator { let buffer = self.buffer.read(cx); - let messages = message_anchors.enumerate(); - let images = self.image_anchors.iter(); - Self::messages_from_iters(buffer, &self.messages_metadata, messages, images) + Self::messages_from_iters(buffer, &self.messages_metadata, message_anchors.enumerate()) } pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator { @@ -2662,10 +2710,8 @@ impl Context { buffer: &'a Buffer, metadata: &'a HashMap, messages: impl Iterator + 'a, - images: impl Iterator + 'a, ) -> impl 'a + Iterator { let mut messages = messages.peekable(); - let mut images = images.peekable(); iter::from_fn(move || { if let Some((start_ix, message_anchor)) = messages.next() { @@ -2686,22 +2732,6 @@ impl Context { let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX); let message_end = message_end_anchor.to_offset(buffer); - let mut image_offsets = SmallVec::new(); - while let Some(image_anchor) = images.peek() { - if 
image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() { - image_offsets.push(( - image_anchor.anchor.to_offset(buffer), - MessageImage { - image_id: image_anchor.image_id, - image: image_anchor.image.clone(), - }, - )); - images.next(); - } else { - break; - } - } - return Some(Message { index_range: start_ix..end_ix, offset_range: message_start..message_end, @@ -2710,7 +2740,6 @@ impl Context { role: metadata.role, status: metadata.status.clone(), cache: metadata.cache.clone(), - image_offsets, }); } None @@ -2748,9 +2777,6 @@ impl Context { })?; if let Some(summary) = summary { - this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))? - .await; - let context = this.read_with(&cx, |this, cx| this.serialize(cx))?; let mut discriminant = 1; let mut new_path; @@ -2790,45 +2816,6 @@ impl Context { }); } - pub fn serialize_images(&self, fs: Arc, cx: &AppContext) -> Task<()> { - let mut images_to_save = self - .images - .iter() - .map(|(id, (_, llm_image))| { - let fs = fs.clone(); - let llm_image = llm_image.clone(); - let id = *id; - async move { - if let Some(llm_image) = llm_image.await { - let path: PathBuf = - context_images_dir().join(&format!("{}.png.base64", id)); - if fs - .metadata(path.as_path()) - .await - .log_err() - .flatten() - .is_none() - { - fs.atomic_write(path, llm_image.source.to_string()) - .await - .log_err(); - } - } - } - }) - .collect::>(); - cx.background_executor().spawn(async move { - if fs - .create_dir(context_images_dir().as_ref()) - .await - .log_err() - .is_some() - { - while let Some(_) = images_to_save.next().await {} - } - }) - } - pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext) { let timestamp = self.next_timestamp(); let summary = self.summary.get_or_insert(ContextSummary::default()); @@ -2914,9 +2901,6 @@ pub struct SavedMessage { pub id: MessageId, pub start: usize, pub metadata: MessageMetadata, - #[serde(default)] - // This is defaulted for backwards compatibility 
with JSON files created before August 2024. We didn't always have this field. - pub image_offsets: Vec<(usize, u64)>, } #[derive(Serialize, Deserialize)] @@ -3102,7 +3086,6 @@ impl SavedContextV0_3_0 { timestamp, cache: None, }, - image_offsets: Vec::new(), }) }) .collect(), diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index 4d6a3b6d92eb54..b80bef5f2d623e 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -170,12 +170,6 @@ pub fn contexts_dir() -> &'static PathBuf { }) } -/// Returns the path within the contexts directory where images from contexts are stored. -pub fn context_images_dir() -> &'static PathBuf { - static CONTEXT_IMAGES_DIR: OnceLock = OnceLock::new(); - CONTEXT_IMAGES_DIR.get_or_init(|| contexts_dir().join("images")) -} - /// Returns the path to the contexts directory. /// /// This is where the prompts for use with the Assistant are stored. From bd1ff476b987ed2570f90b76e8c028d7e7951543 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 10 Sep 2024 15:38:27 -0400 Subject: [PATCH 027/762] Revert tokenizer for custom OpenAI models (#17660) Fix for custom openai models tokenizer settings. --- crates/language_model/src/provider/open_ai.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 15d84f6cca2efc..a7a962e9251671 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -370,7 +370,11 @@ pub fn count_open_ai_tokens( }) .collect::>(); - tiktoken_rs::num_tokens_from_messages(model.id(), &messages) + if let open_ai::Model::Custom { .. 
} = model { + tiktoken_rs::num_tokens_from_messages("gpt-4", &messages) + } else { + tiktoken_rs::num_tokens_from_messages(model.id(), &messages) + } }) .boxed() } From 130f19d8f953ca43bf6c833ac0e0c7362a170850 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 10 Sep 2024 15:50:51 -0400 Subject: [PATCH 028/762] Correctly merge settings for vtsls (#17657) Release Notes: - Fixed vtsls initialization_options in project settings files --- crates/languages/src/vtsls.rs | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index d38ee85f313891..3dca82688cb81d 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -7,14 +7,14 @@ use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::project_settings::{BinarySettings, ProjectSettings}; use serde_json::{json, Value}; -use settings::Settings; +use settings::{Settings, SettingsLocation}; use std::{ any::Any, ffi::OsString, path::{Path, PathBuf}, sync::Arc, }; -use util::{maybe, ResultExt}; +use util::{maybe, merge_json_value_into, ResultExt}; fn typescript_server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -274,17 +274,29 @@ impl LspAdapter for VtslsLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(SERVER_NAME) - .and_then(|s| s.initialization_options.clone()) + ProjectSettings::get( + Some(SettingsLocation { + worktree_id: adapter.worktree_id(), + path: adapter.worktree_root_path(), + }), + cx, + ) + .lsp + .get(SERVER_NAME) + .and_then(|s| s.initialization_options.clone()) })?; if let Some(options) = override_options { return Ok(options); } - self.initialization_options(adapter) + let mut initialization_options = self + .initialization_options(adapter) .await - .map(|o| o.unwrap()) + .map(|o| o.unwrap())?; + + if let 
Some(override_options) = override_options { + merge_json_value_into(override_options, &mut initialization_options) + } + Ok(initialization_options) } fn language_ids(&self) -> HashMap { From 36eb1c15eaf7bff72bf120e1fccc7a73326ad8e9 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 10 Sep 2024 15:51:01 -0400 Subject: [PATCH 029/762] use ssh lsp store (#17655) Release Notes: - ssh remoting: Added support for booting language servers (in limited circumstances) --------- Co-authored-by: Mikayla --- crates/assistant/src/assistant_panel.rs | 15 +- crates/assistant/src/inline_assistant.rs | 4 +- crates/assistant/src/prompts.rs | 6 +- crates/collab/src/tests/integration_tests.rs | 10 +- .../remote_editing_collaboration_tests.rs | 2 +- crates/editor/src/clangd_ext.rs | 2 +- crates/editor/src/editor.rs | 2 +- crates/editor/src/editor_tests.rs | 10 +- crates/editor/src/items.rs | 4 +- crates/editor/src/rust_analyzer_ext.rs | 2 +- .../src/test/editor_lsp_test_context.rs | 2 +- crates/extension/src/extension_lsp_adapter.rs | 1 - crates/extension/src/extension_manifest.rs | 8 +- crates/extension/src/extension_store.rs | 5 +- crates/extension/src/extension_store_test.rs | 2 +- .../src/wasm_host/wit/since_v0_1_0.rs | 4 +- crates/gpui/src/app.rs | 6 + crates/language/src/buffer_tests.rs | 56 +- crates/language/src/language.rs | 59 +- crates/language/src/language_registry.rs | 211 ++- crates/language/src/language_settings.rs | 12 +- .../src/active_buffer_language.rs | 4 +- .../src/language_selector.rs | 2 +- crates/language_tools/src/lsp_log.rs | 2 +- crates/language_tools/src/syntax_tree_view.rs | 2 +- crates/languages/src/rust.rs | 2 +- crates/languages/src/yaml.rs | 2 +- crates/lsp/src/lsp.rs | 10 + .../src/markdown_preview_view.rs | 2 +- .../project/src/lsp_command/signature_help.rs | 2 +- crates/project/src/lsp_store.rs | 1358 ++++++++++++----- crates/project/src/project.rs | 122 +- crates/project/src/project_settings.rs | 2 +- crates/project/src/project_tests.rs | 14 +- 
crates/project/src/task_inventory.rs | 4 +- crates/proto/proto/zed.proto | 37 +- crates/proto/src/proto.rs | 7 +- crates/quick_action_bar/src/repl_menu.rs | 2 +- crates/recent_projects/src/ssh_connections.rs | 21 +- crates/remote/src/ssh_session.rs | 10 +- crates/remote_server/src/headless_project.rs | 29 +- .../remote_server/src/remote_editing_tests.rs | 122 +- crates/repl/src/repl_editor.rs | 8 +- crates/worktree/src/worktree.rs | 7 + crates/zed/src/zed.rs | 20 +- 45 files changed, 1548 insertions(+), 666 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 22843d41cd6207..7eebc97b1d91e8 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -53,7 +53,8 @@ use language_model::{ }; use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; -use project::{Project, ProjectLspAdapterDelegate, Worktree}; +use project::lsp_store::ProjectLspAdapterDelegate; +use project::{Project, Worktree}; use search::{buffer_search::DivRegistrar, BufferSearchBar}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings}; @@ -5340,9 +5341,17 @@ fn make_lsp_adapter_delegate( .worktrees(cx) .next() .ok_or_else(|| anyhow!("no worktrees when constructing ProjectLspAdapterDelegate"))?; + let fs = if project.is_local() { + Some(project.fs().clone()) + } else { + None + }; + let http_client = project.client().http_client().clone(); project.lsp_store().update(cx, |lsp_store, cx| { - Ok(ProjectLspAdapterDelegate::new(lsp_store, &worktree, cx) - as Arc) + Ok( + ProjectLspAdapterDelegate::new(lsp_store, &worktree, http_client, fs, cx) + as Arc, + ) }) }) } diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 7bd74ccabf4763..051db0f2476bf3 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -2377,7 +2377,7 @@ impl Codegen { // If Markdown or No 
Language is Known, increase the randomness for more creative output // If Code, decrease temperature to get more deterministic outputs let temperature = if let Some(language) = language_name.clone() { - if language.as_ref() == "Markdown" { + if language == "Markdown".into() { 1.0 } else { 0.5 @@ -2386,7 +2386,7 @@ impl Codegen { 1.0 }; - let language_name = language_name.as_deref(); + let language_name = language_name.as_ref(); let start = buffer.point_to_buffer_offset(edit_range.start); let end = buffer.point_to_buffer_offset(edit_range.end); let (buffer, range) = if let Some((start, end)) = start.zip(end) { diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 068bf7158de08c..83e894f7978746 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -4,7 +4,7 @@ use fs::Fs; use futures::StreamExt; use gpui::AssetSource; use handlebars::{Handlebars, RenderError}; -use language::BufferSnapshot; +use language::{BufferSnapshot, LanguageName}; use parking_lot::Mutex; use serde::Serialize; use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration}; @@ -204,11 +204,11 @@ impl PromptBuilder { pub fn generate_content_prompt( &self, user_prompt: String, - language_name: Option<&str>, + language_name: Option<&LanguageName>, buffer: BufferSnapshot, range: Range, ) -> Result { - let content_type = match language_name { + let content_type = match language_name.as_ref().map(|l| l.0.as_ref()) { None | Some("Markdown" | "Plain Text") => "text", Some(_) => "code", }; diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index e012fce8c26794..b6d7aca2e078a0 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -2328,11 +2328,11 @@ async fn test_propagate_saves_and_fs_changes( .unwrap(); buffer_b.read_with(cx_b, |buffer, _| { - assert_eq!(&*buffer.language().unwrap().name(), "Rust"); + 
assert_eq!(buffer.language().unwrap().name(), "Rust".into()); }); buffer_c.read_with(cx_c, |buffer, _| { - assert_eq!(&*buffer.language().unwrap().name(), "Rust"); + assert_eq!(buffer.language().unwrap().name(), "Rust".into()); }); buffer_b.update(cx_b, |buf, cx| buf.edit([(0..0, "i-am-b, ")], None, cx)); buffer_c.update(cx_c, |buf, cx| buf.edit([(0..0, "i-am-c, ")], None, cx)); @@ -2432,17 +2432,17 @@ async fn test_propagate_saves_and_fs_changes( buffer_a.read_with(cx_a, |buffer, _| { assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); - assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); buffer_b.read_with(cx_b, |buffer, _| { assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); - assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); buffer_c.read_with(cx_c, |buffer, _| { assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); - assert_eq!(&*buffer.language().unwrap().name(), "JavaScript"); + assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); let new_buffer_a = project_a diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 21e7f9dd9e87f6..c4410fd776be7d 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -100,7 +100,7 @@ async fn test_sharing_an_ssh_remote_project( let file = buffer_b.read(cx).file(); assert_eq!( all_language_settings(file, cx) - .language(Some("Rust")) + .language(Some(&("Rust".into()))) .language_servers, ["override-rust-analyzer".into()] ) diff --git a/crates/editor/src/clangd_ext.rs b/crates/editor/src/clangd_ext.rs index 7fbb8f5f41b11d..2f0f7aaee47e7a 100644 --- a/crates/editor/src/clangd_ext.rs +++ 
b/crates/editor/src/clangd_ext.rs @@ -12,7 +12,7 @@ use crate::{element::register_action, Editor, SwitchSourceHeader}; static CLANGD_SERVER_NAME: &str = "clangd"; fn is_c_language(language: &Language) -> bool { - return language.name().as_ref() == "C++" || language.name().as_ref() == "C"; + return language.name() == "C++".into() || language.name() == "C".into(); } pub fn switch_source_header( diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index cb4ae63afcd9a6..3466888c942de1 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12465,7 +12465,7 @@ fn inlay_hint_settings( let language = snapshot.language_at(location); let settings = all_language_settings(file, cx); settings - .language(language.map(|l| l.name()).as_deref()) + .language(language.map(|l| l.name()).as_ref()) .inlay_hints } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index c8c509fd984927..0b1e0385ded4ad 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -20,8 +20,8 @@ use language::{ }, BracketPairConfig, Capability::ReadWrite, - FakeLspAdapter, IndentGuide, LanguageConfig, LanguageConfigOverride, LanguageMatcher, Override, - ParsedMarkdown, Point, + FakeLspAdapter, IndentGuide, LanguageConfig, LanguageConfigOverride, LanguageMatcher, + LanguageName, Override, ParsedMarkdown, Point, }; use language_settings::{Formatter, FormatterList, IndentGuideSettings}; use multi_buffer::MultiBufferIndentGuide; @@ -9587,12 +9587,12 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test let server_restarts = Arc::new(AtomicUsize::new(0)); let closure_restarts = Arc::clone(&server_restarts); let language_server_name = "test language server"; - let language_name: Arc = "Rust".into(); + let language_name: LanguageName = "Rust".into(); let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(Arc::new(Language::new( 
LanguageConfig { - name: Arc::clone(&language_name), + name: language_name.clone(), matcher: LanguageMatcher { path_suffixes: vec!["rs".to_string()], ..Default::default() @@ -9629,7 +9629,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::Test let _fake_server = fake_servers.next().await.unwrap(); update_test_language_settings(cx, |language_settings| { language_settings.languages.insert( - Arc::clone(&language_name), + language_name.clone(), LanguageSettingsContent { tab_size: NonZeroU32::new(8), ..Default::default() diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 23293469dd4e58..1be2092d7dd79b 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1705,8 +1705,8 @@ mod tests { let buffer = editor.buffer().read(cx).as_singleton().unwrap().read(cx); assert_eq!( - buffer.language().map(|lang| lang.name()).as_deref(), - Some("Rust") + buffer.language().map(|lang| lang.name()), + Some("Rust".into()) ); // Language should be set to Rust assert!(buffer.file().is_none()); // The buffer should not have an associated file }); diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index a152f3c453b4a6..db17eaab282fae 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -13,7 +13,7 @@ use crate::{ static RUST_ANALYZER_NAME: &str = "rust-analyzer"; fn is_rust_language(language: &Language) -> bool { - language.name().as_ref() == "Rust" + language.name() == "Rust".into() } pub fn apply_related_actions(editor: &View, cx: &mut WindowContext) { diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index ec1eccb8643f93..16735760bff5bf 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -58,7 +58,7 @@ impl EditorLspTestContext { let language_registry = project.read_with(cx, |project, _| 
project.languages().clone()); let mut fake_servers = language_registry.register_fake_lsp_adapter( - language.name().as_ref(), + language.name(), FakeLspAdapter { capabilities, ..Default::default() diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs index 41a35cb6177f35..f82b6c9e0e9eb4 100644 --- a/crates/extension/src/extension_lsp_adapter.rs +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -38,7 +38,6 @@ impl LspAdapter for ExtensionLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Arc, _: Arc, delegate: Arc, _: futures::lock::MutexGuard<'a, Option>, diff --git a/crates/extension/src/extension_manifest.rs b/crates/extension/src/extension_manifest.rs index 9d8a841686feb5..3dfd7e0d41f34b 100644 --- a/crates/extension/src/extension_manifest.rs +++ b/crates/extension/src/extension_manifest.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Context, Result}; use collections::{BTreeMap, HashMap}; use fs::Fs; -use language::LanguageServerName; +use language::{LanguageName, LanguageServerName}; use semantic_version::SemanticVersion; use serde::{Deserialize, Serialize}; use std::{ @@ -106,10 +106,10 @@ pub struct GrammarManifestEntry { pub struct LanguageServerManifestEntry { /// Deprecated in favor of `languages`. #[serde(default)] - language: Option>, + language: Option, /// The list of languages this language server should work with. #[serde(default)] - languages: Vec>, + languages: Vec, #[serde(default)] pub language_ids: HashMap, #[serde(default)] @@ -124,7 +124,7 @@ impl LanguageServerManifestEntry { /// /// We can replace this with just field access for the `languages` field once /// we have removed `language`. 
- pub fn languages(&self) -> impl IntoIterator> + '_ { + pub fn languages(&self) -> impl IntoIterator + '_ { let language = if self.languages.is_empty() { self.language.clone() } else { diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 2558dca93ea117..3ebc4f20d36975 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -36,7 +36,8 @@ use gpui::{ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use indexed_docs::{IndexedDocsRegistry, ProviderId}; use language::{ - LanguageConfig, LanguageMatcher, LanguageQueries, LanguageRegistry, QUERY_FILENAME_PREFIXES, + LanguageConfig, LanguageMatcher, LanguageName, LanguageQueries, LanguageRegistry, + QUERY_FILENAME_PREFIXES, }; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; @@ -148,7 +149,7 @@ impl Global for GlobalExtensionStore {} pub struct ExtensionIndex { pub extensions: BTreeMap, ExtensionIndexEntry>, pub themes: BTreeMap, ExtensionIndexThemeEntry>, - pub languages: BTreeMap, ExtensionIndexLanguageEntry>, + pub languages: BTreeMap, } #[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)] diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 70ea7ac9093bc2..da530306d17101 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -609,7 +609,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { .await .unwrap(); - let mut fake_servers = language_registry.fake_language_servers("Gleam"); + let mut fake_servers = language_registry.fake_language_servers("Gleam".into()); let buffer = project .update(cx, |project, cx| { diff --git a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs index 68550a44cfdfc7..337bb8afb04a1d 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs +++ 
b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs @@ -9,6 +9,7 @@ use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; use isahc::config::{Configurable, RedirectPolicy}; +use language::LanguageName; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; @@ -399,8 +400,9 @@ impl ExtensionImports for WasmState { cx.update(|cx| match category.as_str() { "language" => { + let key = key.map(|k| LanguageName::new(&k)); let settings = - AllLanguageSettings::get(location, cx).language(key.as_deref()); + AllLanguageSettings::get(location, cx).language(key.as_ref()); Ok(serde_json::to_string(&settings::LanguageSettings { tab_size: settings.tab_size, })?) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 564b8934897734..ac7d5eb47bb99e 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1504,3 +1504,9 @@ pub struct KeystrokeEvent { /// The action that was resolved for the keystroke, if any pub action: Option>, } + +impl Drop for AppContext { + fn drop(&mut self) { + println!("Dropping the App Context"); + } +} diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 8584eee4c7d493..77a1079d3ad56f 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -72,7 +72,7 @@ fn test_select_language(cx: &mut AppContext) { let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); registry.add(Arc::new(Language::new( LanguageConfig { - name: "Rust".into(), + name: LanguageName::new("Rust"), matcher: LanguageMatcher { path_suffixes: vec!["rs".to_string()], ..Default::default() @@ -83,7 +83,7 @@ fn test_select_language(cx: &mut AppContext) { ))); registry.add(Arc::new(Language::new( LanguageConfig { - name: "Make".into(), + name: LanguageName::new("Make"), matcher: LanguageMatcher { path_suffixes: vec!["Makefile".to_string(), 
"mk".to_string()], ..Default::default() @@ -97,15 +97,13 @@ fn test_select_language(cx: &mut AppContext) { assert_eq!( registry .language_for_file(&file("src/lib.rs"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), Some("Rust".into()) ); assert_eq!( registry .language_for_file(&file("src/lib.mk"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), Some("Make".into()) ); @@ -113,8 +111,7 @@ fn test_select_language(cx: &mut AppContext) { assert_eq!( registry .language_for_file(&file("src/Makefile"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), Some("Make".into()) ); @@ -122,22 +119,19 @@ fn test_select_language(cx: &mut AppContext) { assert_eq!( registry .language_for_file(&file("zed/cars"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), None ); assert_eq!( registry .language_for_file(&file("zed/a.cars"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), None ); assert_eq!( registry .language_for_file(&file("zed/sumk"), None, cx) - .now_or_never() - .and_then(|l| Some(l.ok()?.name())), + .map(|l| l.name()), None ); } @@ -158,23 +152,22 @@ async fn test_first_line_pattern(cx: &mut TestAppContext) { ..Default::default() }); - cx.read(|cx| languages.language_for_file(&file("the/script"), None, cx)) - .await - .unwrap_err(); - cx.read(|cx| languages.language_for_file(&file("the/script"), Some(&"nothing".into()), cx)) - .await - .unwrap_err(); + assert!(cx + .read(|cx| languages.language_for_file(&file("the/script"), None, cx)) + .is_none()); + assert!(cx + .read(|cx| languages.language_for_file(&file("the/script"), Some(&"nothing".into()), cx)) + .is_none()); + assert_eq!( cx.read(|cx| languages.language_for_file( &file("the/script"), Some(&"#!/bin/env node".into()), cx )) - .await .unwrap() - .name() - .as_ref(), - "JavaScript" + .name(), + "JavaScript".into() ); } @@ 
-242,19 +235,16 @@ async fn test_language_for_file_with_custom_file_types(cx: &mut TestAppContext) let language = cx .read(|cx| languages.language_for_file(&file("foo.js"), None, cx)) - .await .unwrap(); - assert_eq!(language.name().as_ref(), "TypeScript"); + assert_eq!(language.name(), "TypeScript".into()); let language = cx .read(|cx| languages.language_for_file(&file("foo.c"), None, cx)) - .await .unwrap(); - assert_eq!(language.name().as_ref(), "C++"); + assert_eq!(language.name(), "C++".into()); let language = cx .read(|cx| languages.language_for_file(&file("Dockerfile.dev"), None, cx)) - .await .unwrap(); - assert_eq!(language.name().as_ref(), "Dockerfile"); + assert_eq!(language.name(), "Dockerfile".into()); } fn file(path: &str) -> Arc { @@ -2245,10 +2235,10 @@ fn test_language_at_with_hidden_languages(cx: &mut AppContext) { for point in [Point::new(0, 4), Point::new(0, 16)] { let config = snapshot.language_scope_at(point).unwrap(); - assert_eq!(config.language_name().as_ref(), "Markdown"); + assert_eq!(config.language_name(), "Markdown".into()); let language = snapshot.language_at(point).unwrap(); - assert_eq!(language.name().as_ref(), "Markdown"); + assert_eq!(language.name().0.as_ref(), "Markdown"); } buffer @@ -2757,7 +2747,7 @@ fn ruby_lang() -> Language { fn html_lang() -> Language { Language::new( LanguageConfig { - name: "HTML".into(), + name: LanguageName::new("HTML"), block_comment: Some(("".into())), ..Default::default() }, diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 7e8fcc655dc43f..6424da8a54280e 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -28,6 +28,7 @@ use futures::Future; use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task}; pub use highlight_map::HighlightMap; use http_client::HttpClient; +pub use language_registry::LanguageName; use lsp::{CodeActionKind, LanguageServerBinary}; use parking_lot::Mutex; use regex::Regex; @@ -67,8 +68,8 @@ pub use 
buffer::Operation; pub use buffer::*; pub use diagnostic_set::DiagnosticEntry; pub use language_registry::{ - LanguageNotFound, LanguageQueries, LanguageRegistry, LanguageServerBinaryStatus, - PendingLanguageServer, QUERY_FILENAME_PREFIXES, + AvailableLanguage, LanguageNotFound, LanguageQueries, LanguageRegistry, + LanguageServerBinaryStatus, PendingLanguageServer, QUERY_FILENAME_PREFIXES, }; pub use lsp::LanguageServerId; pub use outline::*; @@ -140,6 +141,12 @@ pub trait ToLspPosition { #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] pub struct LanguageServerName(pub Arc); +impl LanguageServerName { + pub fn from_proto(s: String) -> Self { + Self(Arc::from(s)) + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Location { pub buffer: Model, @@ -195,9 +202,12 @@ impl CachedLspAdapter { }) } + pub fn name(&self) -> Arc { + self.adapter.name().0.clone() + } + pub async fn get_language_server_command( self: Arc, - language: Arc, container_dir: Arc, delegate: Arc, cx: &mut AsyncAppContext, @@ -205,18 +215,10 @@ impl CachedLspAdapter { let cached_binary = self.cached_binary.lock().await; self.adapter .clone() - .get_language_server_command(language, container_dir, delegate, cached_binary, cx) + .get_language_server_command(container_dir, delegate, cached_binary, cx) .await } - pub fn will_start_server( - &self, - delegate: &Arc, - cx: &mut AsyncAppContext, - ) -> Option>> { - self.adapter.will_start_server(delegate, cx) - } - pub fn can_be_reinstalled(&self) -> bool { self.adapter.can_be_reinstalled() } @@ -262,11 +264,11 @@ impl CachedLspAdapter { .await } - pub fn language_id(&self, language: &Language) -> String { + pub fn language_id(&self, language_name: &LanguageName) -> String { self.language_ids - .get(language.name().as_ref()) + .get(language_name.0.as_ref()) .cloned() - .unwrap_or_else(|| language.lsp_id()) + .unwrap_or_else(|| language_name.lsp_id()) } #[cfg(any(test, feature = "test-support"))] @@ -296,7 
+298,6 @@ pub trait LspAdapter: 'static + Send + Sync { fn get_language_server_command<'a>( self: Arc, - language: Arc, container_dir: Arc, delegate: Arc, mut cached_binary: futures::lock::MutexGuard<'a, Option>, @@ -317,7 +318,7 @@ pub trait LspAdapter: 'static + Send + Sync { if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), cx).await { log::info!( "found user-installed language server for {}. path: {:?}, arguments: {:?}", - language.name(), + self.name().0, binary.path, binary.arguments ); @@ -387,14 +388,6 @@ pub trait LspAdapter: 'static + Send + Sync { None } - fn will_start_server( - &self, - _: &Arc, - _: &mut AsyncAppContext, - ) -> Option>> { - None - } - async fn fetch_server_binary( &self, latest_version: Box, @@ -562,7 +555,7 @@ pub struct CodeLabel { #[derive(Clone, Deserialize, JsonSchema)] pub struct LanguageConfig { /// Human-readable name of the language. - pub name: Arc, + pub name: LanguageName, /// The name of this language for a Markdown code fence block pub code_fence_block_name: Option>, // The name of the grammar in a WASM bundle (experimental). @@ -699,7 +692,7 @@ impl Override { impl Default for LanguageConfig { fn default() -> Self { Self { - name: Arc::default(), + name: LanguageName::new(""), code_fence_block_name: None, grammar: None, matcher: LanguageMatcher::default(), @@ -1335,7 +1328,7 @@ impl Language { Arc::get_mut(self.grammar.as_mut()?) 
} - pub fn name(&self) -> Arc { + pub fn name(&self) -> LanguageName { self.config.name.clone() } @@ -1343,7 +1336,7 @@ impl Language { self.config .code_fence_block_name .clone() - .unwrap_or_else(|| self.config.name.to_lowercase().into()) + .unwrap_or_else(|| self.config.name.0.to_lowercase().into()) } pub fn context_provider(&self) -> Option> { @@ -1408,10 +1401,7 @@ impl Language { } pub fn lsp_id(&self) -> String { - match self.config.name.as_ref() { - "Plain Text" => "plaintext".to_string(), - language_name => language_name.to_lowercase(), - } + self.config.name.lsp_id() } pub fn prettier_parser_name(&self) -> Option<&str> { @@ -1420,7 +1410,7 @@ impl Language { } impl LanguageScope { - pub fn language_name(&self) -> Arc { + pub fn language_name(&self) -> LanguageName { self.language.config.name.clone() } @@ -1663,7 +1653,6 @@ impl LspAdapter for FakeLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Arc, _: Arc, _: Arc, _: futures::lock::MutexGuard<'a, Option>, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index a558b942d69390..a65d20019f3da0 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -6,9 +6,9 @@ use crate::{ with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher, LanguageServerName, LspAdapter, LspAdapterDelegate, PLAIN_TEXT, }; -use anyhow::{anyhow, Context as _, Result}; +use anyhow::{anyhow, Context, Result}; use collections::{hash_map, HashMap, HashSet}; -use futures::TryFutureExt; + use futures::{ channel::{mpsc, oneshot}, future::Shared, @@ -19,8 +19,10 @@ use gpui::{AppContext, BackgroundExecutor, Task}; use lsp::LanguageServerId; use parking_lot::{Mutex, RwLock}; use postage::watch; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; use std::{ - borrow::Cow, + borrow::{Borrow, Cow}, ffi::OsStr, ops::Not, path::{Path, PathBuf}, @@ -32,6 +34,48 @@ use theme::Theme; use 
unicase::UniCase; use util::{maybe, paths::PathExt, post_inc, ResultExt}; +#[derive( + Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, JsonSchema, +)] +pub struct LanguageName(pub Arc); + +impl LanguageName { + pub fn new(s: &str) -> Self { + Self(Arc::from(s)) + } + + pub fn from_proto(s: String) -> Self { + Self(Arc::from(s)) + } + pub fn to_proto(self) -> String { + self.0.to_string() + } + pub fn lsp_id(&self) -> String { + match self.0.as_ref() { + "Plain Text" => "plaintext".to_string(), + language_name => language_name.to_lowercase(), + } + } +} + +impl Borrow for LanguageName { + fn borrow(&self) -> &str { + self.0.as_ref() + } +} + +impl std::fmt::Display for LanguageName { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +impl<'a> From<&'a str> for LanguageName { + fn from(str: &'a str) -> LanguageName { + LanguageName(str.into()) + } +} + pub struct LanguageRegistry { state: RwLock, language_server_download_dir: Option>, @@ -46,7 +90,7 @@ struct LanguageRegistryState { language_settings: AllLanguageSettingsContent, available_languages: Vec, grammars: HashMap, AvailableGrammar>, - lsp_adapters: HashMap, Vec>>, + lsp_adapters: HashMap>>, available_lsp_adapters: HashMap Arc + 'static + Send + Sync>>, loading_languages: HashMap>>>>, @@ -56,8 +100,10 @@ struct LanguageRegistryState { reload_count: usize, #[cfg(any(test, feature = "test-support"))] - fake_server_txs: - HashMap, Vec>>, + fake_server_txs: HashMap< + LanguageName, + Vec>, + >, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -75,9 +121,9 @@ pub struct PendingLanguageServer { } #[derive(Clone)] -struct AvailableLanguage { +pub struct AvailableLanguage { id: LanguageId, - name: Arc, + name: LanguageName, grammar: Option>, matcher: LanguageMatcher, load: Arc< @@ -93,6 +139,16 @@ struct AvailableLanguage { loaded: bool, } +impl AvailableLanguage { + pub fn name(&self) -> LanguageName { + self.name.clone() + } + + pub fn 
matcher(&self) -> &LanguageMatcher { + &self.matcher + } +} + enum AvailableGrammar { Native(tree_sitter::Language), Loaded(#[allow(unused)] PathBuf, tree_sitter::Language), @@ -196,7 +252,7 @@ impl LanguageRegistry { /// appended to the end. pub fn reorder_language_servers( &self, - language: &Arc, + language: &LanguageName, ordered_lsp_adapters: Vec>, ) { self.state @@ -207,7 +263,7 @@ impl LanguageRegistry { /// Removes the specified languages and grammars from the registry. pub fn remove_languages( &self, - languages_to_remove: &[Arc], + languages_to_remove: &[LanguageName], grammars_to_remove: &[Arc], ) { self.state @@ -215,7 +271,7 @@ impl LanguageRegistry { .remove_languages(languages_to_remove, grammars_to_remove) } - pub fn remove_lsp_adapter(&self, language_name: &str, name: &LanguageServerName) { + pub fn remove_lsp_adapter(&self, language_name: &LanguageName, name: &LanguageServerName) { let mut state = self.state.write(); if let Some(adapters) = state.lsp_adapters.get_mut(language_name) { adapters.retain(|adapter| &adapter.name != name) @@ -267,7 +323,7 @@ impl LanguageRegistry { Some(load_lsp_adapter()) } - pub fn register_lsp_adapter(&self, language_name: Arc, adapter: Arc) { + pub fn register_lsp_adapter(&self, language_name: LanguageName, adapter: Arc) { self.state .write() .lsp_adapters @@ -279,13 +335,14 @@ impl LanguageRegistry { #[cfg(any(feature = "test-support", test))] pub fn register_fake_lsp_adapter( &self, - language_name: &str, + language_name: impl Into, adapter: crate::FakeLspAdapter, ) -> futures::channel::mpsc::UnboundedReceiver { + let language_name = language_name.into(); self.state .write() .lsp_adapters - .entry(language_name.into()) + .entry(language_name.clone()) .or_default() .push(CachedLspAdapter::new(Arc::new(adapter))); self.fake_language_servers(language_name) @@ -294,13 +351,13 @@ impl LanguageRegistry { #[cfg(any(feature = "test-support", test))] pub fn fake_language_servers( &self, - language_name: &str, + 
language_name: LanguageName, ) -> futures::channel::mpsc::UnboundedReceiver { let (servers_tx, servers_rx) = futures::channel::mpsc::unbounded(); self.state .write() .fake_server_txs - .entry(language_name.into()) + .entry(language_name) .or_default() .push(servers_tx); servers_rx @@ -309,7 +366,7 @@ impl LanguageRegistry { /// Adds a language to the registry, which can be loaded if needed. pub fn register_language( &self, - name: Arc, + name: LanguageName, grammar_name: Option>, matcher: LanguageMatcher, load: impl Fn() -> Result<( @@ -445,7 +502,7 @@ impl LanguageRegistry { ) -> impl Future>> { let name = UniCase::new(name); let rx = self.get_or_load_language(|language_name, _| { - if UniCase::new(language_name) == name { + if UniCase::new(&language_name.0) == name { 1 } else { 0 @@ -460,7 +517,7 @@ impl LanguageRegistry { ) -> impl Future>> { let string = UniCase::new(string); let rx = self.get_or_load_language(|name, config| { - if UniCase::new(name) == string + if UniCase::new(&name.0) == string || config .path_suffixes .iter() @@ -474,13 +531,26 @@ impl LanguageRegistry { async move { rx.await? 
} } + pub fn available_language_for_name( + self: &Arc, + name: &LanguageName, + ) -> Option { + let state = self.state.read(); + state + .available_languages + .iter() + .find(|l| &l.name == name) + .cloned() + } + pub fn language_for_file( self: &Arc, file: &Arc, content: Option<&Rope>, cx: &AppContext, - ) -> impl Future>> { + ) -> Option { let user_file_types = all_language_settings(Some(file), cx); + self.language_for_file_internal( &file.full_path(cx), content, @@ -492,8 +562,16 @@ impl LanguageRegistry { self: &Arc, path: &'a Path, ) -> impl Future>> + 'a { - self.language_for_file_internal(path, None, None) - .map_err(|error| error.context(format!("language for file path {}", path.display()))) + let available_language = self.language_for_file_internal(path, None, None); + + let this = self.clone(); + async move { + if let Some(language) = available_language { + this.load_language(&language).await? + } else { + Err(anyhow!(LanguageNotFound)) + } + } } fn language_for_file_internal( @@ -501,19 +579,19 @@ impl LanguageRegistry { path: &Path, content: Option<&Rope>, user_file_types: Option<&HashMap, GlobSet>>, - ) -> impl Future>> { + ) -> Option { let filename = path.file_name().and_then(|name| name.to_str()); let extension = path.extension_or_hidden_file_name(); let path_suffixes = [extension, filename, path.to_str()]; let empty = GlobSet::empty(); - let rx = self.get_or_load_language(move |language_name, config| { + self.find_matching_language(move |language_name, config| { let path_matches_default_suffix = config .path_suffixes .iter() .any(|suffix| path_suffixes.contains(&Some(suffix.as_str()))); let custom_suffixes = user_file_types - .and_then(|types| types.get(language_name)) + .and_then(|types| types.get(&language_name.0)) .unwrap_or(&empty); let path_matches_custom_suffix = path_suffixes .iter() @@ -535,18 +613,15 @@ impl LanguageRegistry { } else { 0 } - }); - async move { rx.await? 
} + }) } - fn get_or_load_language( + fn find_matching_language( self: &Arc, - callback: impl Fn(&str, &LanguageMatcher) -> usize, - ) -> oneshot::Receiver>> { - let (tx, rx) = oneshot::channel(); - - let mut state = self.state.write(); - let Some((language, _)) = state + callback: impl Fn(&LanguageName, &LanguageMatcher) -> usize, + ) -> Option { + let state = self.state.read(); + let available_language = state .available_languages .iter() .filter_map(|language| { @@ -559,15 +634,23 @@ impl LanguageRegistry { }) .max_by_key(|e| e.1) .clone() - else { - let _ = tx.send(Err(anyhow!(LanguageNotFound))); - return rx; - }; + .map(|(available_language, _)| available_language); + drop(state); + available_language + } + + pub fn load_language( + self: &Arc, + language: &AvailableLanguage, + ) -> oneshot::Receiver>> { + let (tx, rx) = oneshot::channel(); + + let mut state = self.state.write(); // If the language is already loaded, resolve with it immediately. for loaded_language in state.languages.iter() { if loaded_language.id == language.id { - let _ = tx.send(Ok(loaded_language.clone())); + tx.send(Ok(loaded_language.clone())).unwrap(); return rx; } } @@ -580,12 +663,15 @@ impl LanguageRegistry { // Otherwise, start loading the language. 
hash_map::Entry::Vacant(entry) => { let this = self.clone(); + + let id = language.id; + let name = language.name.clone(); + let language_load = language.load.clone(); + self.executor .spawn(async move { - let id = language.id; - let name = language.name.clone(); let language = async { - let (config, queries, provider) = (language.load)()?; + let (config, queries, provider) = (language_load)()?; if let Some(grammar) = config.grammar.clone() { let grammar = Some(this.get_or_load_grammar(grammar).await?); @@ -629,13 +715,28 @@ impl LanguageRegistry { }; }) .detach(); + entry.insert(vec![tx]); } } + drop(state); rx } + fn get_or_load_language( + self: &Arc, + callback: impl Fn(&LanguageName, &LanguageMatcher) -> usize, + ) -> oneshot::Receiver>> { + let Some(language) = self.find_matching_language(callback) else { + let (tx, rx) = oneshot::channel(); + let _ = tx.send(Err(anyhow!(LanguageNotFound))); + return rx; + }; + + self.load_language(&language) + } + fn get_or_load_grammar( self: &Arc, name: Arc, @@ -702,11 +803,11 @@ impl LanguageRegistry { self.state.read().languages.to_vec() } - pub fn lsp_adapters(&self, language: &Arc) -> Vec> { + pub fn lsp_adapters(&self, language_name: &LanguageName) -> Vec> { self.state .read() .lsp_adapters - .get(&language.config.name) + .get(language_name) .cloned() .unwrap_or_default() } @@ -723,7 +824,7 @@ impl LanguageRegistry { pub fn create_pending_language_server( self: &Arc, stderr_capture: Arc>>, - language: Arc, + _language_name_for_tests: LanguageName, adapter: Arc, root_path: Arc, delegate: Arc, @@ -741,7 +842,6 @@ impl LanguageRegistry { .clone() .ok_or_else(|| anyhow!("language server download directory has not been assigned before starting server")) .log_err()?; - let language = language.clone(); let container_dir: Arc = Arc::from(download_dir.join(adapter.name.0.as_ref())); let root_path = root_path.clone(); let login_shell_env_loaded = self.login_shell_env_loaded.clone(); @@ -756,12 +856,7 @@ impl LanguageRegistry { 
let binary_result = adapter .clone() - .get_language_server_command( - language.clone(), - container_dir, - delegate.clone(), - &mut cx, - ) + .get_language_server_command(container_dir, delegate.clone(), &mut cx) .await; delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None); @@ -785,10 +880,6 @@ impl LanguageRegistry { .initialization_options(&delegate) .await?; - if let Some(task) = adapter.will_start_server(&delegate, &mut cx) { - task.await?; - } - #[cfg(any(test, feature = "test-support"))] if true { let capabilities = adapter @@ -825,7 +916,7 @@ impl LanguageRegistry { .state .write() .fake_server_txs - .get_mut(language.name().as_ref()) + .get_mut(&_language_name_for_tests) { for tx in txs { tx.unbounded_send(fake_server.clone()).ok(); @@ -935,10 +1026,10 @@ impl LanguageRegistryState { /// appended to the end. fn reorder_language_servers( &mut self, - language: &Arc, + language_name: &LanguageName, ordered_lsp_adapters: Vec>, ) { - let Some(lsp_adapters) = self.lsp_adapters.get_mut(&language.config.name) else { + let Some(lsp_adapters) = self.lsp_adapters.get_mut(language_name) else { return; }; @@ -959,7 +1050,7 @@ impl LanguageRegistryState { fn remove_languages( &mut self, - languages_to_remove: &[Arc], + languages_to_remove: &[LanguageName], grammars_to_remove: &[Arc], ) { if languages_to_remove.is_empty() && grammars_to_remove.is_empty() { diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index ac3c9eb6ca4e08..e1fcaaba28b4f1 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -1,6 +1,6 @@ //! Provides `language`-related settings. 
-use crate::{File, Language, LanguageServerName}; +use crate::{File, Language, LanguageName, LanguageServerName}; use anyhow::Result; use collections::{HashMap, HashSet}; use core::slice; @@ -32,7 +32,7 @@ pub fn language_settings<'a>( cx: &'a AppContext, ) -> &'a LanguageSettings { let language_name = language.map(|l| l.name()); - all_language_settings(file, cx).language(language_name.as_deref()) + all_language_settings(file, cx).language(language_name.as_ref()) } /// Returns the settings for all languages from the provided file. @@ -53,7 +53,7 @@ pub struct AllLanguageSettings { /// The inline completion settings. pub inline_completions: InlineCompletionSettings, defaults: LanguageSettings, - languages: HashMap, LanguageSettings>, + languages: HashMap, pub(crate) file_types: HashMap, GlobSet>, } @@ -204,7 +204,7 @@ pub struct AllLanguageSettingsContent { pub defaults: LanguageSettingsContent, /// The settings for individual languages. #[serde(default)] - pub languages: HashMap, LanguageSettingsContent>, + pub languages: HashMap, /// Settings for associating file extensions and filenames /// with languages. #[serde(default)] @@ -791,7 +791,7 @@ impl InlayHintSettings { impl AllLanguageSettings { /// Returns the [`LanguageSettings`] for the language with the specified name. 
- pub fn language<'a>(&'a self, language_name: Option<&str>) -> &'a LanguageSettings { + pub fn language<'a>(&'a self, language_name: Option<&LanguageName>) -> &'a LanguageSettings { if let Some(name) = language_name { if let Some(overrides) = self.languages.get(name) { return overrides; @@ -821,7 +821,7 @@ impl AllLanguageSettings { } } - self.language(language.map(|l| l.name()).as_deref()) + self.language(language.map(|l| l.name()).as_ref()) .show_inline_completions } } diff --git a/crates/language_selector/src/active_buffer_language.rs b/crates/language_selector/src/active_buffer_language.rs index 647ff93b818f37..6aa31d7ff81ae8 100644 --- a/crates/language_selector/src/active_buffer_language.rs +++ b/crates/language_selector/src/active_buffer_language.rs @@ -1,13 +1,13 @@ use editor::Editor; use gpui::{div, IntoElement, ParentElement, Render, Subscription, View, ViewContext, WeakView}; -use std::sync::Arc; +use language::LanguageName; use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, Tooltip}; use workspace::{item::ItemHandle, StatusItemView, Workspace}; use crate::LanguageSelector; pub struct ActiveBufferLanguage { - active_language: Option>>, + active_language: Option>, workspace: WeakView, _observe_active_editor: Option, } diff --git a/crates/language_selector/src/language_selector.rs b/crates/language_selector/src/language_selector.rs index 6bdf5a67d00a28..489f6fd141b927 100644 --- a/crates/language_selector/src/language_selector.rs +++ b/crates/language_selector/src/language_selector.rs @@ -217,7 +217,7 @@ impl PickerDelegate for LanguageSelectorDelegate { let mat = &self.matches[ix]; let buffer_language_name = self.buffer.read(cx).language().map(|l| l.name()); let mut label = mat.string.clone(); - if buffer_language_name.as_deref() == Some(mat.string.as_str()) { + if buffer_language_name.map(|n| n.0).as_deref() == Some(mat.string.as_str()) { label.push_str(" (current)"); } diff --git a/crates/language_tools/src/lsp_log.rs 
b/crates/language_tools/src/lsp_log.rs index 5cf800d306e8e8..53def5eb2a1195 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -683,7 +683,7 @@ impl LspLogView { self.project .read(cx) .supplementary_language_servers(cx) - .filter_map(|(&server_id, name)| { + .filter_map(|(server_id, name)| { let state = log_store.language_servers.get(&server_id)?; Some(LogMenuItem { server_id, diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index 1d98c3d0b0b82f..e2c4903e191cef 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -471,7 +471,7 @@ impl SyntaxTreeToolbarItemView { fn render_header(active_layer: &OwnedSyntaxLayer) -> ButtonLike { ButtonLike::new("syntax tree header") - .child(Label::new(active_layer.language.name())) + .child(Label::new(active_layer.language.name().0)) .child(Label::new(format_node_range(active_layer.node()))) } } diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 6ed20abe17b038..46b6ce475d5283 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -451,7 +451,7 @@ impl ContextProvider for RustContextProvider { ) -> Option { const DEFAULT_RUN_NAME_STR: &str = "RUST_DEFAULT_PACKAGE_RUN"; let package_to_run = all_language_settings(file.as_ref(), cx) - .language(Some("Rust")) + .language(Some(&"Rust".into())) .tasks .variables .get(DEFAULT_RUN_NAME_STR); diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 4f0270fb266cb5..51a9913b249e9f 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -141,7 +141,7 @@ impl LspAdapter for YamlLspAdapter { let tab_size = cx.update(|cx| { AllLanguageSettings::get(Some(location), cx) - .language(Some("YAML")) + .language(Some(&"YAML".into())) .tab_size })?; let mut options = serde_json::json!({"[yaml]": {"editor.tabSize": tab_size}}); diff --git 
a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 30feffad971b7c..061291757512bf 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -89,6 +89,16 @@ pub struct LanguageServer { #[repr(transparent)] pub struct LanguageServerId(pub usize); +impl LanguageServerId { + pub fn from_proto(id: u64) -> Self { + Self(id as usize) + } + + pub fn to_proto(self) -> u64 { + self.0 as u64 + } +} + /// Handle to a language server RPC activity subscription. pub enum Subscription { Notification { diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index d73e2054837b49..1aa60e2a3b71b9 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -282,7 +282,7 @@ impl MarkdownPreviewView { let buffer = editor.read(cx).buffer().read(cx); if let Some(buffer) = buffer.as_singleton() { if let Some(language) = buffer.read(cx).language() { - return language.name().as_ref() == "Markdown"; + return language.name() == "Markdown".into(); } } false diff --git a/crates/project/src/lsp_command/signature_help.rs b/crates/project/src/lsp_command/signature_help.rs index 163c6ae1346a9a..bf197a11ba1ee8 100644 --- a/crates/project/src/lsp_command/signature_help.rs +++ b/crates/project/src/lsp_command/signature_help.rs @@ -86,7 +86,7 @@ impl SignatureHelp { } else { let markdown = markdown.join(str_for_join); let language_name = language - .map(|n| n.name().to_lowercase()) + .map(|n| n.name().0.to_lowercase()) .unwrap_or_default(); let markdown = if function_options_count >= 2 { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 1d9ca98c064720..b218ac58042f11 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -15,7 +15,7 @@ use async_trait::async_trait; use client::{proto, TypedEnvelope}; use collections::{btree_map, BTreeMap, HashMap, HashSet}; use futures::{ - future::{join_all, 
Shared}, + future::{join_all, BoxFuture, Shared}, select, stream::FuturesUnordered, Future, FutureExt, StreamExt, @@ -25,22 +25,26 @@ use gpui::{ AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel, }; -use http_client::HttpClient; +use http_client::{AsyncBody, Error, HttpClient, Request, Response, Uri}; use itertools::Itertools; use language::{ - language_settings::{language_settings, AllLanguageSettings, LanguageSettings}, + language_settings::{ + all_language_settings, language_settings, AllLanguageSettings, LanguageSettings, + }, markdown, point_to_lsp, prepare_completion_documentation, proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, - DiagnosticEntry, DiagnosticSet, Documentation, File as _, Language, LanguageRegistry, - LanguageServerName, LocalFile, LspAdapterDelegate, Patch, PendingLanguageServer, PointUtf16, - TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, + DiagnosticEntry, DiagnosticSet, Documentation, File as _, Language, LanguageConfig, + LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, + LspAdapterDelegate, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, + ToPointUtf16, Transaction, Unclipped, }; use lsp::{ - CompletionContext, DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, - Edit, FileSystemWatcher, InsertTextFormat, LanguageServer, LanguageServerBinary, - LanguageServerId, LspRequestFuture, MessageActionItem, MessageType, OneOf, ServerHealthStatus, - ServerStatus, SymbolKind, TextEdit, Url, WorkDoneProgressCancelParams, WorkspaceFolder, + CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, + DidChangeWatchedFilesRegistrationOptions, Edit, FileSystemWatcher, InsertTextFormat, + LanguageServer, LanguageServerBinary, LanguageServerId, 
LspRequestFuture, MessageActionItem, + MessageType, OneOf, ServerHealthStatus, ServerStatus, SymbolKind, TextEdit, Url, + WorkDoneProgressCancelParams, WorkspaceFolder, }; use parking_lot::{Mutex, RwLock}; use postage::watch; @@ -54,6 +58,7 @@ use similar::{ChangeTag, TextDiff}; use smol::channel::Sender; use snippet::Snippet; use std::{ + any::Any, cmp::Ordering, convert::TryInto, ffi::OsStr, @@ -85,27 +90,86 @@ const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1); const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); pub const SERVER_PROGRESS_THROTTLE_TIMEOUT: Duration = Duration::from_millis(100); +pub struct LocalLspStore { + http_client: Option>, + environment: Model, + fs: Arc, + yarn: Model, + pub language_servers: HashMap, + last_workspace_edits_by_language_server: HashMap, + language_server_watched_paths: HashMap>, + language_server_watcher_registrations: + HashMap>>, + supplementary_language_servers: + HashMap)>, + _subscription: gpui::Subscription, +} + +impl LocalLspStore { + fn shutdown_language_servers( + &mut self, + _cx: &mut ModelContext, + ) -> impl Future { + let shutdown_futures = self + .language_servers + .drain() + .map(|(_, server_state)| async { + use LanguageServerState::*; + match server_state { + Running { server, .. 
} => server.shutdown()?.await, + Starting(task) => task.await?.shutdown()?.await, + } + }) + .collect::>(); + + async move { + futures::future::join_all(shutdown_futures).await; + } + } +} + +pub struct RemoteLspStore { + upstream_client: AnyProtoClient, +} + +impl RemoteLspStore {} + +pub struct SshLspStore { + upstream_client: AnyProtoClient, +} + +#[allow(clippy::large_enum_variant)] +pub enum LspStoreMode { + Local(LocalLspStore), // ssh host and collab host + Remote(RemoteLspStore), // collab guest + Ssh(SshLspStore), // ssh client +} + +impl LspStoreMode { + fn is_local(&self) -> bool { + matches!(self, LspStoreMode::Local(_)) + } + + fn is_ssh(&self) -> bool { + matches!(self, LspStoreMode::Ssh(_)) + } + + fn is_remote(&self) -> bool { + matches!(self, LspStoreMode::Remote(_)) + } +} + pub struct LspStore { + mode: LspStoreMode, downstream_client: Option, - upstream_client: Option, project_id: u64, - http_client: Option>, - fs: Arc, nonce: u128, buffer_store: Model, worktree_store: Model, buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots - environment: Option>, - supplementary_language_servers: - HashMap)>, - languages: Arc, - language_servers: HashMap, + pub languages: Arc, language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>, - language_server_statuses: BTreeMap, - last_workspace_edits_by_language_server: HashMap, - language_server_watched_paths: HashMap>, - language_server_watcher_registrations: - HashMap>>, + pub language_server_statuses: BTreeMap, active_entry: Option, _maintain_workspace_config: Task>, _maintain_buffer_languages: Task<()>, @@ -122,8 +186,6 @@ pub struct LspStore { )>, >, >, - yarn: Model, - _subscription: gpui::Subscription, } pub enum LspStoreEvent { @@ -209,17 +271,53 @@ impl LspStore { client.add_model_request_handler(Self::handle_lsp_command::); } - #[allow(clippy::too_many_arguments)] - pub fn new( + pub fn as_remote(&self) -> Option<&RemoteLspStore> { + match &self.mode { + 
LspStoreMode::Remote(remote_lsp_store) => Some(remote_lsp_store), + _ => None, + } + } + + pub fn as_ssh(&self) -> Option<&SshLspStore> { + match &self.mode { + LspStoreMode::Ssh(ssh_lsp_store) => Some(ssh_lsp_store), + _ => None, + } + } + + pub fn as_local(&self) -> Option<&LocalLspStore> { + match &self.mode { + LspStoreMode::Local(local_lsp_store) => Some(local_lsp_store), + _ => None, + } + } + + pub fn as_local_mut(&mut self) -> Option<&mut LocalLspStore> { + match &mut self.mode { + LspStoreMode::Local(local_lsp_store) => Some(local_lsp_store), + _ => None, + } + } + + pub fn upstream_client(&self) -> Option { + match &self.mode { + LspStoreMode::Ssh(SshLspStore { + upstream_client, .. + }) + | LspStoreMode::Remote(RemoteLspStore { + upstream_client, .. + }) => Some(upstream_client.clone()), + LspStoreMode::Local(_) => None, + } + } + + pub fn new_local( buffer_store: Model, worktree_store: Model, - environment: Option>, + environment: Model, languages: Arc, http_client: Option>, fs: Arc, - downstream_client: Option, - upstream_client: Option, - remote_id: Option, cx: &mut ModelContext, ) -> Self { let yarn = YarnPathStore::new(fs.clone(), cx); @@ -229,32 +327,118 @@ impl LspStore { .detach(); Self { - downstream_client, - upstream_client, - http_client, - fs, - project_id: remote_id.unwrap_or(0), + mode: LspStoreMode::Local(LocalLspStore { + supplementary_language_servers: Default::default(), + language_servers: Default::default(), + last_workspace_edits_by_language_server: Default::default(), + language_server_watched_paths: Default::default(), + language_server_watcher_registrations: Default::default(), + environment, + http_client, + fs, + yarn, + _subscription: cx.on_app_quit(|this, cx| { + this.as_local_mut().unwrap().shutdown_language_servers(cx) + }), + }), + downstream_client: None, + project_id: 0, + buffer_store, + worktree_store, + languages: languages.clone(), + language_server_ids: Default::default(), + language_server_statuses: 
Default::default(), + nonce: StdRng::from_entropy().gen(), + buffer_snapshots: Default::default(), + next_diagnostic_group_id: Default::default(), + diagnostic_summaries: Default::default(), + diagnostics: Default::default(), + active_entry: None, + _maintain_workspace_config: Self::maintain_workspace_config(cx), + _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), + } + } + + fn send_lsp_proto_request( + &self, + buffer: Model, + client: AnyProtoClient, + request: R, + cx: &mut ModelContext<'_, LspStore>, + ) -> Task::Response>> { + let message = request.to_proto(self.project_id, buffer.read(cx)); + cx.spawn(move |this, cx| async move { + let response = client.request(message).await?; + let this = this.upgrade().context("project dropped")?; + request + .response_from_proto(response, this, buffer, cx) + .await + }) + } + + pub fn new_ssh( + buffer_store: Model, + worktree_store: Model, + languages: Arc, + upstream_client: AnyProtoClient, + project_id: u64, + cx: &mut ModelContext, + ) -> Self { + cx.subscribe(&buffer_store, Self::on_buffer_store_event) + .detach(); + cx.subscribe(&worktree_store, Self::on_worktree_store_event) + .detach(); + + Self { + mode: LspStoreMode::Ssh(SshLspStore { upstream_client }), + downstream_client: None, + project_id, buffer_store, worktree_store, languages: languages.clone(), - environment, + language_server_ids: Default::default(), + language_server_statuses: Default::default(), nonce: StdRng::from_entropy().gen(), buffer_snapshots: Default::default(), - supplementary_language_servers: Default::default(), - language_servers: Default::default(), + next_diagnostic_group_id: Default::default(), + diagnostic_summaries: Default::default(), + diagnostics: Default::default(), + active_entry: None, + _maintain_workspace_config: Self::maintain_workspace_config(cx), + _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), + } + } + + pub fn new_remote( + buffer_store: Model, + 
worktree_store: Model, + languages: Arc, + upstream_client: AnyProtoClient, + project_id: u64, + cx: &mut ModelContext, + ) -> Self { + cx.subscribe(&buffer_store, Self::on_buffer_store_event) + .detach(); + cx.subscribe(&worktree_store, Self::on_worktree_store_event) + .detach(); + + Self { + mode: LspStoreMode::Remote(RemoteLspStore { upstream_client }), + downstream_client: None, + project_id, + buffer_store, + worktree_store, + languages: languages.clone(), language_server_ids: Default::default(), language_server_statuses: Default::default(), - last_workspace_edits_by_language_server: Default::default(), - language_server_watched_paths: Default::default(), - language_server_watcher_registrations: Default::default(), + nonce: StdRng::from_entropy().gen(), + buffer_snapshots: Default::default(), next_diagnostic_group_id: Default::default(), diagnostic_summaries: Default::default(), diagnostics: Default::default(), active_entry: None, - yarn, _maintain_workspace_config: Self::maintain_workspace_config(cx), _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), - _subscription: cx.on_app_quit(Self::shutdown_language_servers), } } @@ -273,7 +457,6 @@ impl LspStore { self.unregister_buffer_from_language_servers(buffer, old_file, cx); } - self.detect_language_for_buffer(buffer, cx); self.register_buffer_with_language_servers(buffer, cx); } BufferStoreEvent::BufferDropped(_) => {} @@ -338,7 +521,6 @@ impl LspStore { }) .detach(); - self.detect_language_for_buffer(buffer, cx); self.register_buffer_with_language_servers(buffer, cx); cx.observe_release(buffer, |this, buffer, cx| { if let Some(file) = File::from_dyn(buffer.file()) { @@ -406,9 +588,7 @@ impl LspStore { buffers_with_unknown_injections.push(handle); } } - for buffer in plain_text_buffers { - this.detect_language_for_buffer(&buffer, cx); this.register_buffer_with_language_servers(&buffer, cx); } @@ -426,34 +606,29 @@ impl LspStore { &mut self, buffer_handle: &Model, cx: &mut 
ModelContext, - ) { + ) -> Option { // If the buffer has a language, set it and start the language server if we haven't already. let buffer = buffer_handle.read(cx); - let Some(file) = buffer.file() else { - return; - }; - let content = buffer.as_rope(); - let Some(new_language_result) = self - .languages - .language_for_file(file, Some(content), cx) - .now_or_never() - else { - return; - }; + let file = buffer.file()?; - match new_language_result { - Err(e) => { - if e.is::() { - cx.emit(LspStoreEvent::LanguageDetected { - buffer: buffer_handle.clone(), - new_language: None, - }); - } - } - Ok(new_language) => { + let content = buffer.as_rope(); + let available_language = self.languages.language_for_file(file, Some(content), cx); + if let Some(available_language) = &available_language { + if let Some(Ok(Ok(new_language))) = self + .languages + .load_language(available_language) + .now_or_never() + { self.set_language_for_buffer(buffer_handle, new_language, cx); } - }; + } else { + cx.emit(LspStoreEvent::LanguageDetected { + buffer: buffer_handle.clone(), + new_language: None, + }); + } + + available_language } pub fn set_language_for_buffer( @@ -475,9 +650,7 @@ impl LspStore { if let Some(file) = buffer_file { let worktree = file.worktree.clone(); - if worktree.read(cx).is_local() { - self.start_language_servers(&worktree, new_language.clone(), cx) - } + self.start_language_servers(&worktree, new_language.name(), cx) } cx.emit(LspStoreEvent::LanguageDetected { @@ -494,27 +667,6 @@ impl LspStore { self.active_entry = active_entry; } - fn shutdown_language_servers( - &mut self, - _cx: &mut ModelContext, - ) -> impl Future { - let shutdown_futures = self - .language_servers - .drain() - .map(|(_, server_state)| async { - use LanguageServerState::*; - match server_state { - Running { server, .. 
} => server.shutdown()?.await, - Starting(task) => task.await?.shutdown()?.await, - } - }) - .collect::>(); - - async move { - futures::future::join_all(shutdown_futures).await; - } - } - pub(crate) fn send_diagnostic_summaries( &self, worktree: &mut Worktree, @@ -547,9 +699,11 @@ impl LspStore { ::Params: Send, { let buffer = buffer_handle.read(cx); - if self.upstream_client.is_some() { - return self.send_lsp_proto_request(buffer_handle, self.project_id, request, cx); + + if let Some(upstream_client) = self.upstream_client() { + return self.send_lsp_proto_request(buffer_handle, upstream_client, request, cx); } + let language_server = match server { LanguageServerToQuery::Primary => { match self.primary_language_server_for_buffer(buffer, cx) { @@ -635,26 +789,6 @@ impl LspStore { Task::ready(Ok(Default::default())) } - fn send_lsp_proto_request( - &self, - buffer: Model, - project_id: u64, - request: R, - cx: &mut ModelContext<'_, Self>, - ) -> Task::Response>> { - let Some(upstream_client) = self.upstream_client.clone() else { - return Task::ready(Err(anyhow!("disconnected before completing request"))); - }; - let message = request.to_proto(project_id, buffer.read(cx)); - cx.spawn(move |this, cx| async move { - let response = upstream_client.request(message).await?; - let this = this.upgrade().context("project dropped")?; - request - .response_from_proto(response, this, buffer, cx) - .await - }) - } - pub async fn execute_code_actions_on_servers( this: &WeakModel, adapters_and_servers: &[(Arc, Arc)], @@ -702,8 +836,10 @@ impl LspStore { if let Some(command) = action.lsp_action.command { this.update(cx, |this, _| { - this.last_workspace_edits_by_language_server - .remove(&language_server.server_id()); + if let LspStoreMode::Local(mode) = &mut this.mode { + mode.last_workspace_edits_by_language_server + .remove(&language_server.server_id()); + } })?; language_server @@ -715,12 +851,14 @@ impl LspStore { .await?; this.update(cx, |this, _| { - 
project_transaction.0.extend( - this.last_workspace_edits_by_language_server - .remove(&language_server.server_id()) - .unwrap_or_default() - .0, - ) + if let LspStoreMode::Local(mode) = &mut this.mode { + project_transaction.0.extend( + mode.last_workspace_edits_by_language_server + .remove(&language_server.server_id()) + .unwrap_or_default() + .0, + ) + } })?; } } @@ -752,7 +890,7 @@ impl LspStore { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client.clone() { + if let Some(upstream_client) = self.upstream_client() { let request = proto::ApplyCodeAction { project_id: self.project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), @@ -801,7 +939,9 @@ impl LspStore { if let Some(command) = action.lsp_action.command { this.update(&mut cx, |this, _| { - this.last_workspace_edits_by_language_server + this.as_local_mut() + .unwrap() + .last_workspace_edits_by_language_server .remove(&lang_server.server_id()); })?; @@ -816,7 +956,9 @@ impl LspStore { result?; return this.update(&mut cx, |this, _| { - this.last_workspace_edits_by_language_server + this.as_local_mut() + .unwrap() + .last_workspace_edits_by_language_server .remove(&lang_server.server_id()) .unwrap_or_default() }); @@ -834,7 +976,7 @@ impl LspStore { server_id: LanguageServerId, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client.clone() { + if let Some(upstream_client) = self.upstream_client() { let request = proto::ResolveInlayHint { project_id: self.project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), @@ -912,7 +1054,7 @@ impl LspStore { .map(|(_, server)| LanguageServerToQuery::Other(server.server_id())) .next() .or_else(|| { - self.upstream_client + self.upstream_client() .is_some() .then_some(LanguageServerToQuery::Primary) }) @@ -945,7 +1087,7 @@ impl LspStore { trigger: String, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client.clone() { + if let 
Some(client) = self.upstream_client() { let request = proto::OnTypeFormatting { project_id: self.project_id, buffer_id: buffer.read(cx).remote_id().into(), @@ -1095,7 +1237,7 @@ impl LspStore { range: Range, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client.as_ref() { + if let Some(upstream_client) = self.upstream_client() { let request_task = upstream_client.request(proto::MultiLspQuery { buffer_id: buffer_handle.read(cx).remote_id().into(), version: serialize_version(&buffer_handle.read(cx).version()), @@ -1175,10 +1317,10 @@ impl LspStore { ) -> Task>> { let language_registry = self.languages.clone(); - if let Some(_) = self.upstream_client.clone() { + if let Some(upstream_client) = self.upstream_client() { let task = self.send_lsp_proto_request( buffer.clone(), - self.project_id, + upstream_client, GetCompletions { position, context }, cx, ); @@ -1187,9 +1329,12 @@ impl LspStore { // In the future, we should provide project guests with the names of LSP adapters, // so that they can use the correct LSP adapter when computing labels. For now, // guests just use the first LSP adapter associated with the buffer's language. 
- let lsp_adapter = language - .as_ref() - .and_then(|language| language_registry.lsp_adapters(language).first().cloned()); + let lsp_adapter = language.as_ref().and_then(|language| { + language_registry + .lsp_adapters(&language.name()) + .first() + .cloned() + }); cx.foreground_executor().spawn(async move { let completions = task.await?; @@ -1269,7 +1414,7 @@ impl LspStore { completions: Arc>>, cx: &mut ModelContext, ) -> Task> { - let client = self.upstream_client.clone(); + let client = self.upstream_client(); let language_registry = self.languages.clone(); let project_id = self.project_id; @@ -1478,7 +1623,7 @@ impl LspStore { let buffer = buffer_handle.read(cx); let buffer_id = buffer.remote_id(); - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let project_id = self.project_id; cx.spawn(move |_, mut cx| async move { let response = client @@ -1594,7 +1739,7 @@ impl LspStore { let buffer_id = buffer.remote_id().into(); let lsp_request = InlayHints { range }; - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request = proto::InlayHints { project_id: self.project_id, buffer_id, @@ -1644,7 +1789,7 @@ impl LspStore { ) -> Task> { let position = position.to_point_utf16(buffer.read(cx)); - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), @@ -1716,7 +1861,7 @@ impl LspStore { position: PointUtf16, cx: &mut ModelContext, ) -> Task> { - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), @@ -1790,7 +1935,7 @@ impl LspStore { pub fn 
symbols(&self, query: &str, cx: &mut ModelContext) -> Task>> { let language_registry = self.languages.clone(); - if let Some(upstream_client) = self.upstream_client.as_ref() { + if let Some(upstream_client) = self.upstream_client().as_ref() { let request = upstream_client.request(proto::GetProjectSymbols { project_id: self.project_id, query: query.to_string(), @@ -1816,7 +1961,7 @@ impl LspStore { } else { struct WorkspaceSymbolsResult { lsp_adapter: Arc, - language: Arc, + language: LanguageName, worktree: WeakModel, worktree_abs_path: Arc, lsp_symbols: Vec<(String, SymbolKind, lsp::Location)>, @@ -1837,16 +1982,17 @@ impl LspStore { } let worktree_abs_path = worktree.abs_path().clone(); - let (lsp_adapter, language, server) = match self.language_servers.get(server_id) { - Some(LanguageServerState::Running { - adapter, - language, - server, - .. - }) => (adapter.clone(), language.clone(), server), + let (lsp_adapter, language, server) = + match self.as_local().unwrap().language_servers.get(server_id) { + Some(LanguageServerState::Running { + adapter, + language, + server, + .. 
+ }) => (adapter.clone(), language.clone(), server), - _ => continue, - }; + _ => continue, + }; requests.push( server @@ -2105,7 +2251,7 @@ impl LspStore { uri: lsp::Url::from_file_path(abs_path).log_err()?, }; - for (_, _, server) in self.language_servers_for_worktree(worktree_id) { + for server in self.language_servers_for_worktree(worktree_id) { if let Some(include_text) = include_text(server.as_ref()) { let text = if include_text { Some(buffer.read(cx).text()) @@ -2148,8 +2294,9 @@ impl LspStore { .worktree_store .read(cx) .worktree_for_id(*worktree_id, cx)?; - let state = this.language_servers.get(server_id)?; - let delegate = ProjectLspAdapterDelegate::new(this, &worktree, cx); + let state = this.as_local()?.language_servers.get(server_id)?; + let delegate = + ProjectLspAdapterDelegate::for_local(this, &worktree, cx); match state { LanguageServerState::Starting(_) => None, LanguageServerState::Running { @@ -2204,19 +2351,15 @@ impl LspStore { fn language_servers_for_worktree( &self, worktree_id: WorktreeId, - ) -> impl Iterator, &Arc, &Arc)> { + ) -> impl Iterator> { self.language_server_ids .iter() .filter_map(move |((language_server_worktree_id, _), id)| { if *language_server_worktree_id == worktree_id { - if let Some(LanguageServerState::Running { - adapter, - language, - server, - .. - }) = self.language_servers.get(id) + if let Some(LanguageServerState::Running { server, .. 
}) = + self.as_local()?.language_servers.get(id) { - return Some((adapter, language, server)); + return Some(server); } } None @@ -2241,11 +2384,17 @@ impl LspStore { self.language_server_ids .remove(&(id_to_remove, server_name)); self.language_server_statuses.remove(&server_id_to_remove); - self.language_server_watched_paths - .remove(&server_id_to_remove); - self.last_workspace_edits_by_language_server - .remove(&server_id_to_remove); - self.language_servers.remove(&server_id_to_remove); + if let Some(local_lsp_store) = self.as_local_mut() { + local_lsp_store + .language_server_watched_paths + .remove(&server_id_to_remove); + local_lsp_store + .last_workspace_edits_by_language_server + .remove(&server_id_to_remove); + local_lsp_store + .language_servers + .remove(&server_id_to_remove); + } cx.emit(LspStoreEvent::LanguageServerRemoved(server_id_to_remove)); } } @@ -2306,11 +2455,14 @@ impl LspStore { .insert((worktree_id, language_server_name), language_server_id); } + #[track_caller] pub(crate) fn register_buffer_with_language_servers( &mut self, buffer_handle: &Model, cx: &mut ModelContext, ) { + let available_language = self.detect_language_for_buffer(buffer_handle, cx); + let buffer = buffer_handle.read(cx); let buffer_id = buffer.remote_id(); @@ -2324,7 +2476,6 @@ impl LspStore { return; }; let initial_snapshot = buffer.text_snapshot(); - let language = buffer.language().cloned(); let worktree_id = file.worktree_id(cx); if let Some(diagnostics) = self.diagnostics.get(&worktree_id) { @@ -2336,12 +2487,12 @@ impl LspStore { } } - if let Some(language) = language { - for adapter in self.languages.lsp_adapters(&language) { + if let Some(language) = available_language { + for adapter in self.languages.lsp_adapters(&language.name()) { let server = self .language_server_ids .get(&(worktree_id, adapter.name.clone())) - .and_then(|id| self.language_servers.get(id)) + .and_then(|id| self.as_local()?.language_servers.get(id)) .and_then(|server_state| { if let 
LanguageServerState::Running { server, .. } = server_state { Some(server.clone()) @@ -2359,7 +2510,7 @@ impl LspStore { lsp::DidOpenTextDocumentParams { text_document: lsp::TextDocumentItem::new( uri.clone(), - adapter.language_id(&language), + adapter.language_id(&language.name()), 0, initial_snapshot.text(), ), @@ -2409,7 +2560,7 @@ impl LspStore { let ids = &self.language_server_ids; if let Some(language) = buffer.language().cloned() { - for adapter in self.languages.lsp_adapters(&language) { + for adapter in self.languages.lsp_adapters(&language.name()) { if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) { buffer.update_diagnostics(*server_id, Default::default(), cx); } @@ -2537,7 +2688,7 @@ impl LspStore { symbol: &Symbol, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request = client.request(proto::OpenBufferForSymbol { project_id: self.project_id, symbol: Some(Self::serialize_symbol(symbol)), @@ -2605,7 +2756,7 @@ impl LspStore { let p = abs_path.clone(); let yarn_worktree = this .update(&mut cx, move |this, cx| { - this.yarn.update(cx, |_, cx| { + this.as_local().unwrap().yarn.update(cx, |_, cx| { cx.spawn(|this, mut cx| async move { let t = this .update(&mut cx, |this, cx| { @@ -2755,7 +2906,7 @@ impl LspStore { ::Result: Send, ::Params: Send, { - debug_assert!(self.upstream_client.is_none()); + debug_assert!(self.upstream_client().is_none()); let snapshot = buffer.read(cx).snapshot(); let scope = position.and_then(|position| snapshot.language_scope_at(position)); @@ -2801,7 +2952,7 @@ impl LspStore { ::Params: Send, ::Result: Send, { - let sender_id = envelope.original_sender_id()?; + let sender_id = envelope.original_sender_id().unwrap_or_default(); let buffer_id = T::buffer_id_from_proto(&envelope.payload)?; let buffer_handle = this.update(&mut cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) @@ -2839,7 +2990,7 @@ 
impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let sender_id = envelope.original_sender_id()?; + let sender_id = envelope.original_sender_id().unwrap_or_default(); let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let version = deserialize_version(&envelope.payload.version); let buffer = this.update(&mut cx, |this, cx| { @@ -2979,7 +3130,7 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let sender_id = envelope.original_sender_id()?; + let sender_id = envelope.original_sender_id().unwrap_or_default(); let action = Self::deserialize_code_action( envelope .payload @@ -3184,7 +3335,9 @@ impl LspStore { simulate_disk_based_diagnostics_completion, adapter, .. - }) = self.language_servers.get_mut(&language_server_id) + }) = self + .as_local_mut() + .and_then(|local_store| local_store.language_servers.get_mut(&language_server_id)) else { return; }; @@ -3205,8 +3358,9 @@ impl LspStore { if let Some(LanguageServerState::Running { simulate_disk_based_diagnostics_completion, .. 
- }) = this.language_servers.get_mut(&language_server_id) - { + }) = this.as_local_mut().and_then(|local_store| { + local_store.language_servers.get_mut(&language_server_id) + }) { *simulate_disk_based_diagnostics_completion = None; } }) @@ -3264,7 +3418,20 @@ impl LspStore { language_server_id: LanguageServerId, cx: &mut ModelContext, ) { - let Some(watchers) = self + let worktrees = self + .worktree_store + .read(cx) + .worktrees() + .filter_map(|worktree| { + self.language_servers_for_worktree(worktree.read(cx).id()) + .find(|server| server.server_id() == language_server_id) + .map(|_| worktree) + }) + .collect::>(); + + let local_lsp_store = self.as_local_mut().unwrap(); + + let Some(watchers) = local_lsp_store .language_server_watcher_registrations .get(&language_server_id) else { @@ -3278,17 +3445,6 @@ impl LspStore { language_server_id ); - let worktrees = self - .worktree_store - .read(cx) - .worktrees() - .filter_map(|worktree| { - self.language_servers_for_worktree(worktree.read(cx).id()) - .find(|(_, _, server)| server.server_id() == language_server_id) - .map(|_| worktree) - }) - .collect::>(); - enum PathToWatch { Worktree { literal_prefix: Arc, @@ -3438,20 +3594,29 @@ impl LspStore { watch_builder.watch_abs_path(abs_path, globset); } } - let watcher = watch_builder.build(self.fs.clone(), language_server_id, cx); - self.language_server_watched_paths + let watcher = watch_builder.build(local_lsp_store.fs.clone(), language_server_id, cx); + local_lsp_store + .language_server_watched_paths .insert(language_server_id, watcher); cx.notify(); } pub fn language_server_for_id(&self, id: LanguageServerId) -> Option> { - if let Some(LanguageServerState::Running { server, .. 
}) = self.language_servers.get(&id) { - Some(server.clone()) - } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) { - Some(Arc::clone(server)) - } else { - None + if let Some(local_lsp_store) = self.as_local() { + if let Some(LanguageServerState::Running { server, .. }) = + local_lsp_store.language_servers.get(&id) + { + Some(server.clone()) + } else if let Some((_, server)) = + local_lsp_store.supplementary_language_servers.get(&id) + { + Some(Arc::clone(server)) + } else { + None + } + } else { + None } } @@ -3480,7 +3645,9 @@ impl LspStore { .log_err(); this.update(&mut cx, |this, _| { if let Some(transaction) = transaction { - this.last_workspace_edits_by_language_server + this.as_local_mut() + .unwrap() + .last_workspace_edits_by_language_server .insert(server_id, transaction); } })?; @@ -3665,14 +3832,16 @@ impl LspStore { params: DidChangeWatchedFilesRegistrationOptions, cx: &mut ModelContext, ) { - let registrations = self - .language_server_watcher_registrations - .entry(language_server_id) - .or_default(); + if let Some(local) = self.as_local_mut() { + let registrations = local + .language_server_watcher_registrations + .entry(language_server_id) + .or_default(); - registrations.insert(registration_id.to_string(), params.watchers); + registrations.insert(registration_id.to_string(), params.watchers); - self.rebuild_watched_paths(language_server_id, cx); + self.rebuild_watched_paths(language_server_id, cx); + } } fn on_lsp_unregister_did_change_watched_files( @@ -3681,26 +3850,28 @@ impl LspStore { registration_id: &str, cx: &mut ModelContext, ) { - let registrations = self - .language_server_watcher_registrations - .entry(language_server_id) - .or_default(); + if let Some(local) = self.as_local_mut() { + let registrations = local + .language_server_watcher_registrations + .entry(language_server_id) + .or_default(); - if registrations.remove(registration_id).is_some() { - log::info!( + if 
registrations.remove(registration_id).is_some() { + log::info!( "language server {}: unregistered workspace/DidChangeWatchedFiles capability with id {}", language_server_id, registration_id ); - } else { - log::warn!( + } else { + log::warn!( "language server {}: failed to unregister workspace/DidChangeWatchedFiles capability with id {}. not registered.", language_server_id, registration_id ); - } + } - self.rebuild_watched_paths(language_server_id, cx); + self.rebuild_watched_paths(language_server_id, cx); + } } #[allow(clippy::type_complexity)] @@ -3915,7 +4086,7 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let sender_id = envelope.original_sender_id()?; + let sender_id = envelope.original_sender_id().unwrap_or_default(); let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let buffer = this.update(&mut cx, |this, cx| { this.buffer_store.read(cx).get_existing(buffer_id) @@ -3991,7 +4162,7 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let peer_id = envelope.original_sender_id()?; + let peer_id = envelope.original_sender_id().unwrap_or_default(); let symbol = envelope .payload .symbol @@ -4093,6 +4264,76 @@ impl LspStore { Ok(proto::Ack {}) } + pub async fn handle_create_language_server( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let name = LanguageServerName::from_proto(envelope.payload.name); + + let binary = envelope + .payload + .binary + .ok_or_else(|| anyhow!("missing binary"))?; + let binary = LanguageServerBinary { + path: PathBuf::from(binary.path), + env: None, + arguments: binary.arguments.into_iter().map(Into::into).collect(), + }; + let language = envelope + .payload + .language + .ok_or_else(|| anyhow!("missing language"))?; + let language_name = LanguageName::from_proto(language.name); + let matcher: LanguageMatcher = 
serde_json::from_str(&language.matcher)?; + this.update(&mut cx, |this, cx| { + this.languages + .register_language(language_name.clone(), None, matcher.clone(), { + let language_name = language_name.clone(); + move || { + Ok(( + LanguageConfig { + name: language_name.clone(), + matcher: matcher.clone(), + ..Default::default() + }, + Default::default(), + Default::default(), + )) + } + }); + cx.background_executor() + .spawn(this.languages.language_for_name(language_name.0.as_ref())) + .detach(); + + let adapter = Arc::new(SshLspAdapter::new( + name, + binary, + envelope.payload.initialization_options, + envelope.payload.code_action_kinds, + )); + + this.languages + .register_lsp_adapter(language_name.clone(), adapter.clone()); + let Some(worktree) = this + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + else { + return Err(anyhow!("worktree not found")); + }; + this.start_language_server( + &worktree, + CachedLspAdapter::new(adapter), + language_name, + cx, + ); + Ok(()) + })??; + Ok(proto::Ack {}) + } + async fn handle_apply_additional_edits_for_completion( this: Model, envelope: TypedEnvelope, @@ -4139,16 +4380,24 @@ impl LspStore { }) } + fn language_settings<'a>( + &'a self, + worktree: &'a Model, + language: &LanguageName, + cx: &'a mut ModelContext, + ) -> &'a LanguageSettings { + let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx)); + all_language_settings(root_file.map(|f| f as _).as_ref(), cx).language(Some(language)) + } + pub fn start_language_servers( &mut self, worktree: &Model, - language: Arc, + language: LanguageName, cx: &mut ModelContext, ) { - let (root_file, is_local) = - worktree.update(cx, |tree, cx| (tree.root_file(cx), tree.is_local())); - let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx); - if !settings.enable_language_server || !is_local { + let settings = self.language_settings(worktree, &language, cx); + if !settings.enable_language_server || self.mode.is_remote() { 
return; } @@ -4176,7 +4425,7 @@ impl LspStore { .load_available_lsp_adapter(&desired_language_server) { self.languages - .register_lsp_adapter(language.name(), adapter.adapter.clone()); + .register_lsp_adapter(language.clone(), adapter.adapter.clone()); enabled_lsp_adapters.push(adapter); continue; } @@ -4189,7 +4438,6 @@ impl LspStore { log::info!( "starting language servers for {language}: {adapters}", - language = language.name(), adapters = enabled_lsp_adapters .iter() .map(|adapter| adapter.name.0.as_ref()) @@ -4210,14 +4458,108 @@ impl LspStore { .reorder_language_servers(&language, enabled_lsp_adapters); } + /* + ssh client owns the lifecycle of the language servers + ssh host actually runs the binaries + + in the future: ssh client will use the local extensions to get the downloads etc. + and send them up over the ssh connection (but today) we'll just the static config + + languages::() <-- registers lsp adapters + on the ssh host we won't have adapters for the LSPs + */ + + fn start_language_server_on_ssh_host( + &mut self, + worktree: &Model, + adapter: Arc, + language: LanguageName, + cx: &mut ModelContext, + ) { + let ssh = self.as_ssh().unwrap(); + + let configured_binary = ProjectSettings::get( + Some(worktree.update(cx, |worktree, cx| worktree.settings_location(cx))), + cx, + ) + .lsp + .get(&adapter.name()) + .and_then(|c| c.binary.as_ref()) + .and_then(|config| { + if let Some(path) = &config.path { + Some((path.clone(), config.arguments.clone().unwrap_or_default())) + } else { + None + } + }); + let delegate = + ProjectLspAdapterDelegate::for_ssh(self, worktree, cx) as Arc; + let project_id = self.project_id; + let worktree_id = worktree.read(cx).id().to_proto(); + let upstream_client = ssh.upstream_client.clone(); + let name = adapter.name().to_string(); + let Some((path, arguments)) = configured_binary else { + cx.emit(LspStoreEvent::Notification(format!( + "ssh-remoting currently requires manually configuring {} in your settings", + 
adapter.name() + ))); + return; + }; + let Some(available_language) = self.languages.available_language_for_name(&language) else { + log::error!("failed to find available language {language}"); + return; + }; + let task = cx.spawn(|_, _| async move { + let delegate = delegate; + let name = adapter.name().to_string(); + let code_action_kinds = adapter + .adapter + .code_action_kinds() + .map(|kinds| serde_json::to_string(&kinds)) + .transpose()?; + let get_options = adapter.adapter.clone().initialization_options(&delegate); + let initialization_options = get_options + .await? + .map(|options| serde_json::to_string(&options)) + .transpose()?; + + upstream_client + .request(proto::CreateLanguageServer { + project_id, + worktree_id, + name, + binary: Some(proto::LanguageServerCommand { path, arguments }), + initialization_options, + code_action_kinds, + language: Some(proto::AvailableLanguage { + name: language.to_proto(), + matcher: serde_json::to_string(&available_language.matcher())?, + }), + }) + .await + }); + cx.spawn(|this, mut cx| async move { + if let Err(e) = task.await { + this.update(&mut cx, |_this, cx| { + cx.emit(LspStoreEvent::Notification(format!( + "failed to start {}: {}", + name, e + ))) + }) + .ok(); + } + }) + .detach(); + } + fn start_language_server( &mut self, worktree_handle: &Model, adapter: Arc, - language: Arc, + language: LanguageName, cx: &mut ModelContext, ) { - if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT { + if self.mode.is_remote() { return; } @@ -4229,12 +4571,24 @@ impl LspStore { return; } + if self.mode.is_ssh() { + self.start_language_server_on_ssh_host(worktree_handle, adapter, language, cx); + return; + } + + if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT { + return; + } + let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); - let lsp_adapter_delegate = ProjectLspAdapterDelegate::new(self, worktree_handle, cx); + let lsp_adapter_delegate = 
ProjectLspAdapterDelegate::for_local(self, worktree_handle, cx); let cli_environment = self + .as_local() + .unwrap() .environment - .as_ref() - .and_then(|environment| environment.read(cx).get_cli_environment()); + .read(cx) + .get_cli_environment(); + let pending_server = match self.languages.create_pending_language_server( stderr_capture.clone(), language.clone(), @@ -4255,6 +4609,8 @@ impl LspStore { }), cx, ); + + // We need some on the SSH client, and some on SSH host let lsp = project_settings.lsp.get(&adapter.name.0); let override_options = lsp.and_then(|s| s.initialization_options.clone()); @@ -4329,7 +4685,10 @@ impl LspStore { }) }); - self.language_servers.insert(server_id, state); + self.as_local_mut() + .unwrap() + .language_servers + .insert(server_id, state); self.language_server_ids.insert(key, server_id); } @@ -4340,7 +4699,7 @@ impl LspStore { override_initialization_options: Option, pending_server: PendingLanguageServer, adapter: Arc, - language: Arc, + language: LanguageName, server_id: LanguageServerId, key: (WorktreeId, LanguageServerName), cx: &mut AsyncAppContext, @@ -4377,51 +4736,63 @@ impl LspStore { fn reinstall_language_server( &mut self, - language: Arc, + language: LanguageName, adapter: Arc, server_id: LanguageServerId, cx: &mut ModelContext, ) -> Option> { log::info!("beginning to reinstall server"); - let existing_server = match self.language_servers.remove(&server_id) { - Some(LanguageServerState::Running { server, .. }) => Some(server), - _ => None, - }; + if let Some(local) = self.as_local_mut() { + let existing_server = match local.language_servers.remove(&server_id) { + Some(LanguageServerState::Running { server, .. 
}) => Some(server), + _ => None, + }; - self.worktree_store.update(cx, |store, cx| { - for worktree in store.worktrees() { - let key = (worktree.read(cx).id(), adapter.name.clone()); - self.language_server_ids.remove(&key); - } - }); + self.worktree_store.update(cx, |store, cx| { + for worktree in store.worktrees() { + let key = (worktree.read(cx).id(), adapter.name.clone()); + self.language_server_ids.remove(&key); + } + }); - Some(cx.spawn(move |this, mut cx| async move { - if let Some(task) = existing_server.and_then(|server| server.shutdown()) { - log::info!("shutting down existing server"); + Some(cx.spawn(move |this, mut cx| async move { + if let Some(task) = existing_server.and_then(|server| server.shutdown()) { + log::info!("shutting down existing server"); + task.await; + } + + // TODO: This is race-safe with regards to preventing new instances from + // starting while deleting, but existing instances in other projects are going + // to be very confused and messed up + let Some(task) = this + .update(&mut cx, |this, cx| { + this.languages.delete_server_container(adapter.clone(), cx) + }) + .log_err() + else { + return; + }; task.await; - } - // TODO: This is race-safe with regards to preventing new instances from - // starting while deleting, but existing instances in other projects are going - // to be very confused and messed up - let Some(task) = this - .update(&mut cx, |this, cx| { - this.languages.delete_server_container(adapter.clone(), cx) + this.update(&mut cx, |this, cx| { + for worktree in this.worktree_store.read(cx).worktrees().collect::>() { + this.start_language_server( + &worktree, + adapter.clone(), + language.clone(), + cx, + ); + } }) - .log_err() - else { - return; - }; - task.await; - - this.update(&mut cx, |this, cx| { - for worktree in this.worktree_store.read(cx).worktrees().collect::>() { - this.start_language_server(&worktree, adapter.clone(), language.clone(), cx); - } - }) - .ok(); - })) + .ok(); + })) + } else if let 
Some(_ssh_store) = self.as_ssh() { + // TODO + None + } else { + None + } } async fn shutdown_language_server( @@ -4469,76 +4840,90 @@ impl LspStore { cx: &mut ModelContext, ) -> Task> { let key = (worktree_id, adapter_name); - if let Some(server_id) = self.language_server_ids.remove(&key) { - let name = key.1 .0; - log::info!("stopping language server {name}"); - - // Remove other entries for this language server as well - let mut orphaned_worktrees = vec![worktree_id]; - let other_keys = self.language_server_ids.keys().cloned().collect::>(); - for other_key in other_keys { - if self.language_server_ids.get(&other_key) == Some(&server_id) { - self.language_server_ids.remove(&other_key); - orphaned_worktrees.push(other_key.0); + if self.mode.is_local() { + if let Some(server_id) = self.language_server_ids.remove(&key) { + let name = key.1 .0; + log::info!("stopping language server {name}"); + + // Remove other entries for this language server as well + let mut orphaned_worktrees = vec![worktree_id]; + let other_keys = self.language_server_ids.keys().cloned().collect::>(); + for other_key in other_keys { + if self.language_server_ids.get(&other_key) == Some(&server_id) { + self.language_server_ids.remove(&other_key); + orphaned_worktrees.push(other_key.0); + } } - } - self.buffer_store.update(cx, |buffer_store, cx| { - for buffer in buffer_store.buffers() { - buffer.update(cx, |buffer, cx| { - buffer.update_diagnostics(server_id, Default::default(), cx); + self.buffer_store.update(cx, |buffer_store, cx| { + for buffer in buffer_store.buffers() { + buffer.update(cx, |buffer, cx| { + buffer.update_diagnostics(server_id, Default::default(), cx); + }); + } + }); + + let project_id = self.project_id; + for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() { + summaries.retain(|path, summaries_by_server_id| { + if summaries_by_server_id.remove(&server_id).is_some() { + if let Some(downstream_client) = self.downstream_client.clone() { + downstream_client + 
.send(proto::UpdateDiagnosticSummary { + project_id, + worktree_id: worktree_id.to_proto(), + summary: Some(proto::DiagnosticSummary { + path: path.to_string_lossy().to_string(), + language_server_id: server_id.0 as u64, + error_count: 0, + warning_count: 0, + }), + }) + .log_err(); + } + !summaries_by_server_id.is_empty() + } else { + true + } }); } - }); - let project_id = self.project_id; - for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() { - summaries.retain(|path, summaries_by_server_id| { - if summaries_by_server_id.remove(&server_id).is_some() { - if let Some(downstream_client) = self.downstream_client.clone() { - downstream_client - .send(proto::UpdateDiagnosticSummary { - project_id, - worktree_id: worktree_id.to_proto(), - summary: Some(proto::DiagnosticSummary { - path: path.to_string_lossy().to_string(), - language_server_id: server_id.0 as u64, - error_count: 0, - warning_count: 0, - }), - }) - .log_err(); + for diagnostics in self.diagnostics.values_mut() { + diagnostics.retain(|_, diagnostics_by_server_id| { + if let Ok(ix) = + diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) + { + diagnostics_by_server_id.remove(ix); + !diagnostics_by_server_id.is_empty() + } else { + true } - !summaries_by_server_id.is_empty() - } else { - true - } - }); - } + }); + } - for diagnostics in self.diagnostics.values_mut() { - diagnostics.retain(|_, diagnostics_by_server_id| { - if let Ok(ix) = - diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) - { - diagnostics_by_server_id.remove(ix); - !diagnostics_by_server_id.is_empty() - } else { - true - } - }); + self.as_local_mut() + .unwrap() + .language_server_watched_paths + .remove(&server_id); + self.language_server_statuses.remove(&server_id); + cx.notify(); + + let server_state = self + .as_local_mut() + .unwrap() + .language_servers + .remove(&server_id); + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); + cx.spawn(move |_, cx| async move { + 
Self::shutdown_language_server(server_state, name, cx).await; + orphaned_worktrees + }) + } else { + Task::ready(Vec::new()) } - - self.language_server_watched_paths.remove(&server_id); - self.language_server_statuses.remove(&server_id); - cx.notify(); - - let server_state = self.language_servers.remove(&server_id); - cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); - cx.spawn(move |_, cx| async move { - Self::shutdown_language_server(server_state, name, cx).await; - orphaned_worktrees - }) + } else if self.mode.is_ssh() { + // TODO ssh + Task::ready(Vec::new()) } else { Task::ready(Vec::new()) } @@ -4549,7 +4934,7 @@ impl LspStore { buffers: impl IntoIterator>, cx: &mut ModelContext, ) { - if let Some(client) = self.upstream_client.clone() { + if let Some(client) = self.upstream_client() { let request = client.request(proto::RestartLanguageServers { project_id: self.project_id, buffer_ids: buffers @@ -4562,18 +4947,17 @@ impl LspStore { .detach_and_log_err(cx); } else { #[allow(clippy::mutable_key_type)] - let language_server_lookup_info: HashSet<(Model, Arc)> = buffers + let language_server_lookup_info: HashSet<(Model, LanguageName)> = buffers .into_iter() .filter_map(|buffer| { let buffer = buffer.read(cx); let file = buffer.file()?; let worktree = File::from_dyn(Some(file))?.worktree.clone(); - let language = self - .languages - .language_for_file(file, Some(buffer.as_rope()), cx) - .now_or_never()? 
- .ok()?; - Some((worktree, language)) + let language = + self.languages + .language_for_file(file, Some(buffer.as_rope()), cx)?; + + Some((worktree, language.name())) }) .collect(); @@ -4586,7 +4970,7 @@ impl LspStore { pub fn restart_language_servers( &mut self, worktree: Model, - language: Arc, + language: LanguageName, cx: &mut ModelContext, ) { let worktree_id = worktree.read(cx).id(); @@ -4637,7 +5021,7 @@ impl LspStore { } fn check_errored_server( - language: Arc, + language: LanguageName, adapter: Arc, server_id: LanguageServerId, installation_test_binary: Option, @@ -4719,6 +5103,7 @@ impl LspStore { .clone() .workspace_configuration(&delegate, cx) .await?; + // This has to come from the server let (language_server, mut initialization_options) = pending_server.task.await?; let name = language_server.name(); @@ -4730,6 +5115,7 @@ impl LspStore { let adapter = adapter.clone(); if let Some(this) = this.upgrade() { adapter.process_diagnostics(&mut params); + // Everything else has to be on the server, Can we make it on the client? 
this.update(&mut cx, |this, cx| { this.update_diagnostics( server_id, @@ -5341,7 +5727,7 @@ impl LspStore { fn insert_newly_running_language_server( &mut self, - language: Arc, + language: LanguageName, adapter: Arc, language_server: Arc, server_id: LanguageServerId, @@ -5361,15 +5747,17 @@ impl LspStore { // Update language_servers collection with Running variant of LanguageServerState // indicating that the server is up and running and ready - self.language_servers.insert( - server_id, - LanguageServerState::Running { - adapter: adapter.clone(), - language: language.clone(), - server: language_server.clone(), - simulate_disk_based_diagnostics_completion: None, - }, - ); + if let Some(local) = self.as_local_mut() { + local.language_servers.insert( + server_id, + LanguageServerState::Running { + adapter: adapter.clone(), + language: language.clone(), + server: language_server.clone(), + simulate_disk_based_diagnostics_completion: None, + }, + ); + } self.language_server_statuses.insert( server_id, @@ -5409,7 +5797,7 @@ impl LspStore { if file.worktree.read(cx).id() != key.0 || !self .languages - .lsp_adapters(language) + .lsp_adapters(&language.name()) .iter() .any(|a| a.name == key.1) { @@ -5441,7 +5829,7 @@ impl LspStore { lsp::DidOpenTextDocumentParams { text_document: lsp::TextDocumentItem::new( uri, - adapter.language_id(language), + adapter.language_id(&language.name()), version, initial_snapshot.text(), ), @@ -5521,12 +5909,14 @@ impl LspStore { ) -> impl Iterator, &'a Arc)> { self.language_server_ids_for_buffer(buffer, cx) .into_iter() - .filter_map(|server_id| match self.language_servers.get(&server_id)? { - LanguageServerState::Running { - adapter, server, .. - } => Some((adapter, server)), - _ => None, - }) + .filter_map( + |server_id| match self.as_local()?.language_servers.get(&server_id)? { + LanguageServerState::Running { + adapter, server, .. 
+ } => Some((adapter, server)), + _ => None, + }, + ) } pub(crate) fn cancel_language_server_work_for_buffers( @@ -5564,9 +5954,12 @@ impl LspStore { server: Arc, cx: &mut ModelContext, ) { - self.supplementary_language_servers - .insert(id, (name, server)); - cx.emit(LspStoreEvent::LanguageServerAdded(id)); + if let Some(local) = self.as_local_mut() { + local + .supplementary_language_servers + .insert(id, (name, server)); + cx.emit(LspStoreEvent::LanguageServerAdded(id)); + } } pub fn unregister_supplementary_language_server( @@ -5574,27 +5967,33 @@ impl LspStore { id: LanguageServerId, cx: &mut ModelContext, ) { - self.supplementary_language_servers.remove(&id); - cx.emit(LspStoreEvent::LanguageServerRemoved(id)); + if let Some(local) = self.as_local_mut() { + local.supplementary_language_servers.remove(&id); + cx.emit(LspStoreEvent::LanguageServerRemoved(id)); + } } pub fn supplementary_language_servers( &self, - ) -> impl '_ + Iterator { - self.supplementary_language_servers - .iter() - .map(|(id, (name, _))| (id, name)) + ) -> impl '_ + Iterator { + self.as_local().into_iter().flat_map(|local| { + local + .supplementary_language_servers + .iter() + .map(|(id, (name, _))| (*id, name.clone())) + }) } pub fn language_server_adapter_for_id( &self, id: LanguageServerId, ) -> Option> { - if let Some(LanguageServerState::Running { adapter, .. }) = self.language_servers.get(&id) { - Some(adapter.clone()) - } else { - None - } + self.as_local() + .and_then(|local| local.language_servers.get(&id)) + .and_then(|language_server_state| match language_server_state { + LanguageServerState::Running { adapter, .. 
} => Some(adapter.clone()), + _ => None, + }) } pub(super) fn update_local_worktree_language_servers( @@ -5607,6 +6006,8 @@ impl LspStore { return; } + let Some(local) = self.as_local() else { return }; + let worktree_id = worktree_handle.read(cx).id(); let mut language_server_ids = self .language_server_ids @@ -5621,9 +6022,9 @@ impl LspStore { let abs_path = worktree_handle.read(cx).abs_path(); for server_id in &language_server_ids { if let Some(LanguageServerState::Running { server, .. }) = - self.language_servers.get(server_id) + local.language_servers.get(server_id) { - if let Some(watched_paths) = self + if let Some(watched_paths) = local .language_server_watched_paths .get(server_id) .and_then(|paths| paths.read(cx).worktree_paths.get(&worktree_id)) @@ -5665,8 +6066,11 @@ impl LspStore { token_to_cancel: Option, _cx: &mut ModelContext, ) { + let Some(local) = self.as_local() else { + return; + }; let status = self.language_server_statuses.get(&server_id); - let server = self.language_servers.get(&server_id); + let server = local.language_servers.get(&server_id); if let Some((LanguageServerState::Running { server, .. 
}, status)) = server.zip(status) { for (token, progress) in &status.pending_work { if let Some(token_to_cancel) = token_to_cancel.as_ref() { @@ -5715,7 +6119,7 @@ impl LspStore { if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) { let worktree_id = file.worktree_id(cx); self.languages - .lsp_adapters(language) + .lsp_adapters(&language.name()) .iter() .flat_map(|adapter| { let key = (worktree_id, adapter.name.clone()); @@ -5777,7 +6181,8 @@ impl LspStore { language_server: Arc, cx: &mut AsyncAppContext, ) -> Result { - let fs = this.update(cx, |this, _| this.fs.clone())?; + let fs = this.read_with(cx, |this, _| this.as_local().unwrap().fs.clone())?; + let mut operations = Vec::new(); if let Some(document_changes) = edit.document_changes { match document_changes { @@ -6207,7 +6612,10 @@ impl LanguageServerWatchedPathsBuilder { while let Some(update) = push_updates.0.next().await { let action = lsp_store .update(&mut cx, |this, cx| { - let Some(watcher) = this + let Some(local) = this.as_local() else { + return ControlFlow::Break(()); + }; + let Some(watcher) = local .language_server_watched_paths .get(&language_server_id) else { @@ -6297,13 +6705,27 @@ pub enum LanguageServerState { Starting(Task>>), Running { - language: Arc, + language: LanguageName, adapter: Arc, server: Arc, simulate_disk_based_diagnostics_completion: Option>, }, } +impl std::fmt::Debug for LanguageServerState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + LanguageServerState::Starting(_) => { + f.debug_struct("LanguageServerState::Starting").finish() + } + LanguageServerState::Running { language, .. 
} => f + .debug_struct("LanguageServerState::Running") + .field("language", &language) + .finish(), + } + } +} + #[derive(Clone, Debug, Serialize)] pub struct LanguageServerProgress { pub is_disk_based_diagnostics_progress: bool, @@ -6378,24 +6800,136 @@ fn glob_literal_prefix(glob: &str) -> &str { &glob[..literal_end] } +pub struct SshLspAdapter { + name: LanguageServerName, + binary: LanguageServerBinary, + initialization_options: Option, + code_action_kinds: Option>, +} + +impl SshLspAdapter { + pub fn new( + name: LanguageServerName, + binary: LanguageServerBinary, + initialization_options: Option, + code_action_kinds: Option, + ) -> Self { + Self { + name, + binary, + initialization_options, + code_action_kinds: code_action_kinds + .as_ref() + .and_then(|c| serde_json::from_str(c).ok()), + } + } +} + +#[async_trait(?Send)] +impl LspAdapter for SshLspAdapter { + fn name(&self) -> LanguageServerName { + self.name.clone() + } + + async fn initialization_options( + self: Arc, + _: &Arc, + ) -> Result> { + let Some(options) = &self.initialization_options else { + return Ok(None); + }; + let result = serde_json::from_str(options)?; + Ok(result) + } + + fn code_action_kinds(&self) -> Option> { + self.code_action_kinds.clone() + } + + async fn check_if_user_installed( + &self, + _: &dyn LspAdapterDelegate, + _: &AsyncAppContext, + ) -> Option { + Some(self.binary.clone()) + } + + async fn cached_server_binary( + &self, + _: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { + None + } + + async fn fetch_latest_server_version( + &self, + _: &dyn LspAdapterDelegate, + ) -> Result> { + anyhow::bail!("SshLspAdapter does not support fetch_latest_server_version") + } + + async fn fetch_server_binary( + &self, + _: Box, + _: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Result { + anyhow::bail!("SshLspAdapter does not support fetch_server_binary") + } + + async fn installation_test_binary(&self, _: PathBuf) -> Option { + None + } +} + pub struct ProjectLspAdapterDelegate 
{ lsp_store: WeakModel, worktree: worktree::Snapshot, - fs: Arc, + fs: Option>, http_client: Arc, language_registry: Arc, load_shell_env_task: Shared>>>, } impl ProjectLspAdapterDelegate { + fn for_local( + lsp_store: &LspStore, + worktree: &Model, + cx: &mut ModelContext, + ) -> Arc { + let local = lsp_store + .as_local() + .expect("ProjectLspAdapterDelegate cannot be constructed on a remote"); + + let http_client = local + .http_client + .clone() + .unwrap_or_else(|| Arc::new(BlockedHttpClient)); + + Self::new(lsp_store, worktree, http_client, Some(local.fs.clone()), cx) + } + + fn for_ssh( + lsp_store: &LspStore, + worktree: &Model, + cx: &mut ModelContext, + ) -> Arc { + Self::new(lsp_store, worktree, Arc::new(BlockedHttpClient), None, cx) + } + pub fn new( lsp_store: &LspStore, worktree: &Model, + http_client: Arc, + fs: Option>, cx: &mut ModelContext, ) -> Arc { let worktree_id = worktree.read(cx).id(); let worktree_abs_path = worktree.read(cx).abs_path(); - let load_shell_env_task = if let Some(environment) = &lsp_store.environment { + let load_shell_env_task = if let Some(environment) = + &lsp_store.as_local().map(|local| local.environment.clone()) + { environment.update(cx, |env, cx| { env.get_environment(Some(worktree_id), Some(worktree_abs_path), cx) }) @@ -6403,14 +6937,10 @@ impl ProjectLspAdapterDelegate { Task::ready(None).shared() }; - let Some(http_client) = lsp_store.http_client.clone() else { - panic!("ProjectLspAdapterDelegate cannot be constructedd on an ssh-remote yet") - }; - Arc::new(Self { lsp_store: cx.weak_model(), worktree: worktree.read(cx).snapshot(), - fs: lsp_store.fs.clone(), + fs, http_client, language_registry: lsp_store.languages.clone(), load_shell_env_task, @@ -6418,6 +6948,26 @@ impl ProjectLspAdapterDelegate { } } +struct BlockedHttpClient; + +impl HttpClient for BlockedHttpClient { + fn send( + &self, + _req: Request, + ) -> BoxFuture<'static, Result, Error>> { + Box::pin(async { + Err(std::io::Error::new( + 
std::io::ErrorKind::PermissionDenied, + "ssh host blocked http connection", + ) + .into()) + }) + } + + fn proxy(&self) -> Option<&Uri> { + None + } +} #[async_trait] impl LspAdapterDelegate for ProjectLspAdapterDelegate { fn show_notification(&self, message: &str, cx: &mut AppContext) { @@ -6447,6 +6997,7 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { #[cfg(not(target_os = "windows"))] async fn which(&self, command: &OsStr) -> Option { + self.fs.as_ref()?; let worktree_abs_path = self.worktree.abs_path(); let shell_path = self.shell_env().await.get("PATH").cloned(); which::which_in(command, shell_path.as_ref(), worktree_abs_path).ok() @@ -6454,6 +7005,8 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { #[cfg(target_os = "windows")] async fn which(&self, command: &OsStr) -> Option { + self.fs.as_ref()?; + // todo(windows) Getting the shell env variables in a current directory on Windows is more complicated than other platforms // there isn't a 'default shell' necessarily. 
The closest would be the default profile on the windows terminal // SEE: https://learn.microsoft.com/en-us/windows/terminal/customize-settings/startup @@ -6472,17 +7025,20 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { async fn read_text_file(&self, path: PathBuf) -> Result { if self.worktree.entry_for_path(&path).is_none() { return Err(anyhow!("no such path {path:?}")); + }; + if let Some(fs) = &self.fs { + let content = fs.load(&path).await?; + Ok(content) + } else { + return Err(anyhow!("cannot open {path:?} on ssh host (yet!)")); } - let path = self.worktree.absolutize(path.as_ref())?; - let content = self.fs.load(&path).await?; - Ok(content) } } async fn populate_labels_for_symbols( symbols: Vec, language_registry: &Arc, - default_language: Option>, + default_language: Option, lsp_adapter: Option>, output: &mut Vec, ) { @@ -6497,7 +7053,12 @@ async fn populate_labels_for_symbols( .ok() .or_else(|| { unknown_path.get_or_insert(symbol.path.path.clone()); - default_language.clone() + default_language.as_ref().and_then(|name| { + language_registry + .language_for_name(&name.0) + .now_or_never()? 
+ .ok() + }) }); symbols_by_language .entry(language) @@ -6523,9 +7084,12 @@ async fn populate_labels_for_symbols( let mut labels = Vec::new(); if let Some(language) = language { - let lsp_adapter = lsp_adapter - .clone() - .or_else(|| language_registry.lsp_adapters(&language).first().cloned()); + let lsp_adapter = lsp_adapter.clone().or_else(|| { + language_registry + .lsp_adapters(&language.name()) + .first() + .cloned() + }); if let Some(lsp_adapter) = lsp_adapter { labels = lsp_adapter .labels_for_symbols(&label_params, &language) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index ed489af687d753..f67423b0738514 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -107,7 +107,7 @@ pub use buffer_store::ProjectTransaction; pub use lsp_store::{ DiagnosticSummary, LanguageServerLogType, LanguageServerProgress, LanguageServerPromptRequest, LanguageServerStatus, LanguageServerToQuery, LspStore, LspStoreEvent, - ProjectLspAdapterDelegate, SERVER_PROGRESS_THROTTLE_TIMEOUT, + SERVER_PROGRESS_THROTTLE_TIMEOUT, }; const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500; @@ -643,16 +643,13 @@ impl Project { let environment = ProjectEnvironment::new(&worktree_store, env, cx); let lsp_store = cx.new_model(|cx| { - LspStore::new( + LspStore::new_local( buffer_store.clone(), worktree_store.clone(), - Some(environment.clone()), + environment.clone(), languages.clone(), Some(client.http_client()), fs.clone(), - None, - None, - None, cx, ) }); @@ -712,16 +709,89 @@ impl Project { fs: Arc, cx: &mut AppContext, ) -> Model { - let this = Self::local(client, node, user_store, languages, fs, None, cx); - this.update(cx, |this, cx| { - let client: AnyProtoClient = ssh.clone().into(); + cx.new_model(|cx: &mut ModelContext| { + let (tx, rx) = mpsc::unbounded(); + cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) + .detach(); + let tasks = Inventory::new(cx); + let global_snippets_dir = 
paths::config_dir().join("snippets"); + let snippets = + SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); + + let worktree_store = cx.new_model(|_| { + let mut worktree_store = WorktreeStore::new(false, fs.clone()); + worktree_store.set_upstream_client(ssh.clone().into()); + worktree_store + }); + cx.subscribe(&worktree_store, Self::on_worktree_store_event) + .detach(); + + let buffer_store = + cx.new_model(|cx| BufferStore::new(worktree_store.clone(), None, cx)); + cx.subscribe(&buffer_store, Self::on_buffer_store_event) + .detach(); - this.worktree_store.update(cx, |store, _cx| { - store.set_upstream_client(client.clone()); + let settings_observer = cx.new_model(|cx| { + SettingsObserver::new_ssh(ssh.clone().into(), worktree_store.clone(), cx) }); - this.settings_observer = cx.new_model(|cx| { - SettingsObserver::new_ssh(ssh.clone().into(), this.worktree_store.clone(), cx) + + let environment = ProjectEnvironment::new(&worktree_store, None, cx); + let lsp_store = cx.new_model(|cx| { + LspStore::new_ssh( + buffer_store.clone(), + worktree_store.clone(), + languages.clone(), + ssh.clone().into(), + 0, + cx, + ) }); + cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach(); + + let this = Self { + buffer_ordered_messages_tx: tx, + collaborators: Default::default(), + worktree_store, + buffer_store, + lsp_store, + current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), + join_project_response_message_id: 0, + client_state: ProjectClientState::Local, + client_subscriptions: Vec::new(), + _subscriptions: vec![ + cx.observe_global::(Self::on_settings_changed), + cx.on_release(Self::release), + ], + active_entry: None, + snippets, + languages, + client, + user_store, + settings_observer, + fs, + ssh_session: Some(ssh.clone()), + buffers_needing_diff: Default::default(), + git_diff_debouncer: DebouncedDelay::new(), + terminals: Terminals { + local_handles: Vec::new(), + }, + node: Some(node), + default_prettier: 
DefaultPrettier::default(), + prettiers_per_worktree: HashMap::default(), + prettier_instances: HashMap::default(), + tasks, + hosted_project_id: None, + dev_server_project_id: None, + search_history: Self::new_search_history(), + environment, + remotely_created_buffers: Default::default(), + last_formatting_failure: None, + buffers_being_formatted: Default::default(), + search_included_history: Self::new_search_history(), + search_excluded_history: Self::new_search_history(), + }; + + let client: AnyProtoClient = ssh.clone().into(); ssh.subscribe_to_entity(SSH_PROJECT_ID, &cx.handle()); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.buffer_store); @@ -735,9 +805,8 @@ impl Project { LspStore::init(&client); SettingsObserver::init(&client); - this.ssh_session = Some(ssh); - }); - this + this + }) } pub async fn remote( @@ -820,16 +889,12 @@ impl Project { cx.new_model(|cx| BufferStore::new(worktree_store.clone(), Some(remote_id), cx))?; let lsp_store = cx.new_model(|cx| { - let mut lsp_store = LspStore::new( + let mut lsp_store = LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), - None, languages.clone(), - Some(client.http_client()), - fs.clone(), - None, - Some(client.clone().into()), - Some(remote_id), + client.clone().into(), + remote_id, cx, ); lsp_store.set_language_server_statuses_from_proto(response.payload.language_servers); @@ -1125,8 +1190,7 @@ impl Project { if let Some(language) = buffer_language { if settings.enable_language_server { if let Some(file) = buffer_file { - language_servers_to_start - .push((file.worktree.clone(), Arc::clone(language))); + language_servers_to_start.push((file.worktree.clone(), language.name())); } } language_formatters_to_check @@ -1144,7 +1208,7 @@ impl Project { let language = languages.iter().find_map(|l| { let adapter = self .languages - .lsp_adapters(l) + .lsp_adapters(&l.name()) .iter() .find(|adapter| adapter.name == started_lsp_name)? 
.clone(); @@ -1165,11 +1229,11 @@ impl Project { ) { (None, None) => {} (Some(_), None) | (None, Some(_)) => { - language_servers_to_restart.push((worktree, Arc::clone(language))); + language_servers_to_restart.push((worktree, language.name())); } (Some(current_lsp_settings), Some(new_lsp_settings)) => { if current_lsp_settings != new_lsp_settings { - language_servers_to_restart.push((worktree, Arc::clone(language))); + language_servers_to_restart.push((worktree, language.name())); } } } @@ -4777,7 +4841,7 @@ impl Project { pub fn supplementary_language_servers<'a>( &'a self, cx: &'a AppContext, - ) -> impl '_ + Iterator { + ) -> impl '_ + Iterator { self.lsp_store.read(cx).supplementary_language_servers() } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index c2af1c3597d1e9..70b2eccf237c62 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -19,7 +19,7 @@ use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId}; use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent}; -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] pub struct ProjectSettings { /// Configuration for language servers. 
/// diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index ffa206684fbadc..4662c75477d45f 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -6,7 +6,7 @@ use http_client::Url; use language::{ language_settings::{AllLanguageSettings, LanguageSettingsContent}, tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter, - LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint, + LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint, }; use lsp::{DiagnosticSeverity, NumberOrString}; use parking_lot::Mutex; @@ -1559,7 +1559,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { settings.languages.insert( - Arc::from("Rust"), + "Rust".into(), LanguageSettingsContent { enable_language_server: Some(false), ..Default::default() @@ -1578,14 +1578,14 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { settings.languages.insert( - Arc::from("Rust"), + LanguageName::new("Rust"), LanguageSettingsContent { enable_language_server: Some(true), ..Default::default() }, ); settings.languages.insert( - Arc::from("JavaScript"), + LanguageName::new("JavaScript"), LanguageSettingsContent { enable_language_server: Some(false), ..Default::default() @@ -2983,7 +2983,7 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) { buffer.edit([(0..0, "abc")], None, cx); assert!(buffer.is_dirty()); assert!(!buffer.has_conflict()); - assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text"); + assert_eq!(buffer.language().unwrap().name(), "Plain Text".into()); }); project .update(cx, |project, cx| { @@ -3006,7 +3006,7 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) { 
); assert!(!buffer.is_dirty()); assert!(!buffer.has_conflict()); - assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust"); + assert_eq!(buffer.language().unwrap().name(), "Rust".into()); }); let opened_buffer = project @@ -5308,7 +5308,7 @@ fn json_lang() -> Arc { fn js_lang() -> Arc { Arc::new(Language::new( LanguageConfig { - name: Arc::from("JavaScript"), + name: "JavaScript".into(), matcher: LanguageMatcher { path_suffixes: vec!["js".to_string()], ..Default::default() diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 95ae6aee13f71a..314903ec5da439 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -161,7 +161,7 @@ impl Inventory { cx: &AppContext, ) -> Vec<(TaskSourceKind, TaskTemplate)> { let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language { - name: language.name(), + name: language.name().0, }); let language_tasks = language .and_then(|language| language.context_provider()?.associated_tasks(file, cx)) @@ -207,7 +207,7 @@ impl Inventory { .as_ref() .and_then(|location| location.buffer.read(cx).language_at(location.range.start)); let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language { - name: language.name(), + name: language.name().0, }); let file = location .as_ref() diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 3d464904b8bf2d..b24d9399655b11 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -281,7 +281,9 @@ message Envelope { FindSearchCandidatesResponse find_search_candidates_response = 244; CloseBuffer close_buffer = 245; - UpdateUserSettings update_user_settings = 246; // current max + UpdateUserSettings update_user_settings = 246; + + CreateLanguageServer create_language_server = 247; // current max } reserved 158 to 161; @@ -2497,3 +2499,36 @@ message UpdateUserSettings { uint64 project_id = 1; string content = 2; } + +message 
LanguageServerCommand { + string path = 1; + repeated string arguments = 2; +} + +message AvailableLanguage { + string name = 7; + string matcher = 8; +} + +message CreateLanguageServer { + uint64 project_id = 1; + uint64 worktree_id = 2; + string name = 3; + + LanguageServerCommand binary = 4; + optional string initialization_options = 5; + optional string code_action_kinds = 6; + + AvailableLanguage language = 7; +} + +// message RestartLanguageServer { + +// } +// message DestroyLanguageServer { + +// } + +// message LspWorkspaceConfiguration { + +// } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index d8ebf665885b45..44cb91db1080b8 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -366,7 +366,8 @@ messages!( (FindSearchCandidates, Background), (FindSearchCandidatesResponse, Background), (CloseBuffer, Foreground), - (UpdateUserSettings, Foreground) + (UpdateUserSettings, Foreground), + (CreateLanguageServer, Foreground) ); request_messages!( @@ -490,6 +491,7 @@ request_messages!( (SynchronizeContexts, SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (AddWorktree, AddWorktreeResponse), + (CreateLanguageServer, Ack) ); entity_messages!( @@ -562,7 +564,8 @@ entity_messages!( UpdateContext, SynchronizeContexts, LspExtSwitchSourceHeader, - UpdateUserSettings + UpdateUserSettings, + CreateLanguageServer ); entity_messages!( diff --git a/crates/quick_action_bar/src/repl_menu.rs b/crates/quick_action_bar/src/repl_menu.rs index fbf2ac17e55a99..f4e4cd2d1ad20e 100644 --- a/crates/quick_action_bar/src/repl_menu.rs +++ b/crates/quick_action_bar/src/repl_menu.rs @@ -62,7 +62,7 @@ impl QuickActionBar { return self.render_repl_launch_menu(spec, cx); } SessionSupport::RequiresSetup(language) => { - return self.render_repl_setup(&language, cx); + return self.render_repl_setup(&language.0, cx); } SessionSupport::Unsupported => return None, }; diff --git 
a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 4bee1c5a9fdaa3..8da4284b7f56a7 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -291,11 +291,24 @@ impl SshClientDelegate { self.update_status(Some("building remote server binary from source"), cx); log::info!("building remote server binary from source"); - run_cmd(Command::new("cargo").args(["build", "--package", "remote_server"])).await?; - run_cmd(Command::new("strip").args(["target/debug/remote_server"])).await?; - run_cmd(Command::new("gzip").args(["-9", "-f", "target/debug/remote_server"])).await?; + run_cmd(Command::new("cargo").args([ + "build", + "--package", + "remote_server", + "--target-dir", + "target/remote_server", + ])) + .await?; + // run_cmd(Command::new("strip").args(["target/remote_server/debug/remote_server"])) + // .await?; + run_cmd(Command::new("gzip").args([ + "-9", + "-f", + "target/remote_server/debug/remote_server", + ])) + .await?; - let path = std::env::current_dir()?.join("target/debug/remote_server.gz"); + let path = std::env::current_dir()?.join("target/remote_server/debug/remote_server.gz"); return Ok((path, version)); async fn run_cmd(command: &mut Command) -> Result<()> { diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 91f7b330e4a0e3..5ff11fe09931c9 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -41,11 +41,11 @@ pub struct SshSocket { pub struct SshSession { next_message_id: AtomicU32, - response_channels: ResponseChannels, + response_channels: ResponseChannels, // Lock outgoing_tx: mpsc::UnboundedSender, spawn_process_tx: mpsc::UnboundedSender, client_socket: Option, - state: Mutex, + state: Mutex, // Lock } struct SshClientState { @@ -392,9 +392,9 @@ impl SshSession { ) -> impl 'static + Future> { envelope.id = self.next_message_id.fetch_add(1, SeqCst); let (tx, rx) = 
oneshot::channel(); - self.response_channels - .lock() - .insert(MessageId(envelope.id), tx); + let mut response_channels_lock = self.response_channels.lock(); + response_channels_lock.insert(MessageId(envelope.id), tx); + drop(response_channels_lock); self.outgoing_tx.unbounded_send(envelope).ok(); async move { Ok(rx.await.context("connection lost")?.0) } } diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 60f29bb573fbf8..ca5fe06e137c7f 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -4,14 +4,13 @@ use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext, Task}; use language::LanguageRegistry; use project::{ buffer_store::BufferStore, project_settings::SettingsObserver, search::SearchQuery, - worktree_store::WorktreeStore, LspStore, ProjectPath, WorktreeId, WorktreeSettings, + worktree_store::WorktreeStore, LspStore, ProjectPath, WorktreeId, }; use remote::SshSession; use rpc::{ proto::{self, AnyProtoClient, SSH_PEER_ID, SSH_PROJECT_ID}, TypedEnvelope, }; -use settings::Settings as _; use smol::stream::StreamExt; use std::{ path::{Path, PathBuf}, @@ -33,15 +32,17 @@ impl HeadlessProject { pub fn init(cx: &mut AppContext) { settings::init(cx); language::init(cx); - WorktreeSettings::register(cx); + project::Project::init_settings(cx); } pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { // TODO: we should load the env correctly (as we do in login_shell_env_loaded when stdout is not a pty). Can we re-use the ProjectEnvironment for that? 
- let languages = Arc::new(LanguageRegistry::new( - Task::ready(()), - cx.background_executor().clone(), - )); + let mut languages = + LanguageRegistry::new(Task::ready(()), cx.background_executor().clone()); + languages + .set_language_server_download_dir(PathBuf::from("/Users/conrad/what-could-go-wrong")); + + let languages = Arc::new(languages); let worktree_store = cx.new_model(|_| WorktreeStore::new(true, fs.clone())); let buffer_store = cx.new_model(|cx| { @@ -57,18 +58,17 @@ impl HeadlessProject { }); let environment = project::ProjectEnvironment::new(&worktree_store, None, cx); let lsp_store = cx.new_model(|cx| { - LspStore::new( + let mut lsp_store = LspStore::new_local( buffer_store.clone(), worktree_store.clone(), - Some(environment), + environment, languages, None, fs.clone(), - Some(session.clone().into()), - None, - Some(0), cx, - ) + ); + lsp_store.shared(SSH_PROJECT_ID, session.clone().into(), cx); + lsp_store }); let client: AnyProtoClient = session.clone().into(); @@ -88,9 +88,12 @@ impl HeadlessProject { client.add_model_request_handler(BufferStore::handle_update_buffer); client.add_model_message_handler(BufferStore::handle_close_buffer); + client.add_model_request_handler(LspStore::handle_create_language_server); + BufferStore::init(&client); WorktreeStore::init(&client); SettingsObserver::init(&client); + LspStore::init(&client); HeadlessProject { session: client, diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 0aea585538f245..67a2f0b57d8cba 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -6,7 +6,7 @@ use gpui::{Context, Model, TestAppContext}; use http_client::FakeHttpClient; use language::{ language_settings::{all_language_settings, AllLanguageSettings}, - Buffer, LanguageRegistry, + Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, }; use node_runtime::FakeNodeRuntime; use 
project::{ @@ -202,15 +202,29 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo server_cx.read(|cx| { assert_eq!( AllLanguageSettings::get_global(cx) - .language(Some("Rust")) + .language(Some(&"Rust".into())) .language_servers, ["custom-rust-analyzer".into()] ) }); - fs.insert_tree("/code/project1/.zed", json!({ - "settings.json": r#"{"languages":{"Rust":{"language_servers":["override-rust-analyzer"]}}}"# - })).await; + fs.insert_tree( + "/code/project1/.zed", + json!({ + "settings.json": r#" + { + "languages": {"Rust":{"language_servers":["override-rust-analyzer"]}}, + "lsp": { + "override-rust-analyzer": { + "binary": { + "path": "~/.cargo/bin/rust-analyzer" + } + } + } + }"# + }), + ) + .await; let worktree_id = project .update(cx, |project, cx| { @@ -247,7 +261,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo }), cx ) - .language(Some("Rust")) + .language(Some(&"Rust".into())) .language_servers, ["override-rust-analyzer".into()] ) @@ -257,13 +271,107 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo let file = buffer.read(cx).file(); assert_eq!( all_language_settings(file, cx) - .language(Some("Rust")) + .language(Some(&"Rust".into())) .language_servers, ["override-rust-analyzer".into()] ) }); } +#[gpui::test] +async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let (project, headless, fs) = init_test(cx, server_cx).await; + + fs.insert_tree( + "/code/project1/.zed", + json!({ + "settings.json": r#" + { + "languages": {"Rust":{"language_servers":["rust-analyzer"]}}, + "lsp": { + "rust-analyzer": { + "binary": { + "path": "~/.cargo/bin/rust-analyzer" + } + } + } + }"# + }), + ) + .await; + + cx.update_model(&project, |project, _| { + project.languages().register_test_language(LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".into()], + ..Default::default() + }, + 
..Default::default() + }); + project.languages().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + name: "rust-analyzer", + ..Default::default() + }, + ) + }); + cx.run_until_parked(); + + let worktree_id = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap() + .0 + .read_with(cx, |worktree, _| worktree.id()); + + // Wait for the settings to synchronize + cx.run_until_parked(); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + }) + .await + .unwrap(); + cx.run_until_parked(); + + cx.read(|cx| { + let file = buffer.read(cx).file(); + assert_eq!( + all_language_settings(file, cx) + .language(Some(&"Rust".into())) + .language_servers, + ["rust-analyzer".into()] + ) + }); + + let buffer_id = cx.read(|cx| { + let buffer = buffer.read(cx); + assert_eq!(buffer.language().unwrap().name(), "Rust".into()); + buffer.remote_id() + }); + + server_cx.read(|cx| { + let buffer = headless + .read(cx) + .buffer_store + .read(cx) + .get(buffer_id) + .unwrap(); + + assert_eq!(buffer.read(cx).language().unwrap().name(), "Rust".into()); + }); + + server_cx.read(|cx| { + let lsp_store = headless.read(cx).lsp_store.read(cx); + assert_eq!(lsp_store.as_local().unwrap().language_servers.len(), 1); + }); +} + fn init_logger() { if std::env::var("RUST_LOG").is_ok() { env_logger::try_init().ok(); diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index 112cf591e99398..868594aaf18185 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -6,7 +6,7 @@ use std::sync::Arc; use anyhow::{Context, Result}; use editor::Editor; use gpui::{prelude::*, AppContext, Entity, View, WeakView, WindowContext}; -use language::{BufferSnapshot, Language, Point}; +use language::{BufferSnapshot, Language, LanguageName, Point}; use crate::repl_store::ReplStore; use crate::session::SessionEvent; @@ -99,7 +99,7 @@ 
pub fn run(editor: WeakView, move_down: bool, cx: &mut WindowContext) -> pub enum SessionSupport { ActiveSession(View), Inactive(Box), - RequiresSetup(Arc), + RequiresSetup(LanguageName), Unsupported, } @@ -268,7 +268,7 @@ fn runnable_ranges( range: Range, ) -> (Vec>, Option) { if let Some(language) = buffer.language() { - if language.name().as_ref() == "Markdown" { + if language.name() == "Markdown".into() { return (markdown_code_blocks(buffer, range.clone()), None); } } @@ -305,7 +305,7 @@ fn markdown_code_blocks(buffer: &BufferSnapshot, range: Range) -> Vec) -> bool { - match language.name().as_ref() { + match language.name().0.as_ref() { "TypeScript" | "Python" => true, _ => false, } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 82aad401a4d858..c6e64deb59d1b1 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -564,6 +564,13 @@ impl Worktree { !self.is_local() } + pub fn settings_location(&self, _: &ModelContext) -> SettingsLocation<'static> { + SettingsLocation { + worktree_id: self.id(), + path: Path::new(EMPTY_PATH), + } + } + pub fn snapshot(&self) -> Snapshot { match self { Worktree::Local(worktree) => worktree.snapshot.snapshot.clone(), diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 9ec43d607ae519..93fee57ecdb654 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -2251,14 +2251,8 @@ mod tests { assert!(!editor.is_dirty(cx)); assert_eq!(editor.title(cx), "the-new-name.rs"); assert_eq!( - editor - .buffer() - .read(cx) - .language_at(0, cx) - .unwrap() - .name() - .as_ref(), - "Rust" + editor.buffer().read(cx).language_at(0, cx).unwrap().name(), + "Rust".into() ); }); }) @@ -2374,14 +2368,8 @@ mod tests { editor.update(cx, |editor, cx| { assert!(!editor.is_dirty(cx)); assert_eq!( - editor - .buffer() - .read(cx) - .language_at(0, cx) - .unwrap() - .name() - .as_ref(), - "Rust" + editor.buffer().read(cx).language_at(0, cx).unwrap().name(), + 
"Rust".into() ) }); }) From 19463b59e2fb9ea2c14bf5f75c16891ca9e204f5 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 10 Sep 2024 16:12:45 -0400 Subject: [PATCH 030/762] Add docs for search settings (#17662) --- docs/src/configuring-zed.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index b935a8b824ccf0..82f5a244845b57 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1134,6 +1134,21 @@ These values take in the same options as the root-level settings with the same n `boolean` values +## Search + +- Description: Search options to enable by default when opening new project and buffer searches. +- Setting: `search` +- Default: + +``` +"search": { + "whole_word": false, + "case_sensitive": false, + "include_ignored": false, + "regex": false +}, +``` + ## Show Call Status Icon - Description: Whether or not to show the call status icon in the status bar. From 48a16f9e70715316c0d189d9053963742dd79140 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 10 Sep 2024 16:41:29 -0400 Subject: [PATCH 031/762] ssh: Lookup language servers in env on SSH host (#17658) Release Notes: - ssh remoting: Lookup language server binaries in environment on SSH host --------- Co-authored-by: Bennet --- crates/assistant/src/assistant_panel.rs | 2 +- crates/language/src/language.rs | 8 + crates/project/src/lsp_store.rs | 159 +++++++++++++++---- crates/proto/proto/zed.proto | 28 +++- crates/proto/src/proto.rs | 14 +- crates/remote_server/src/headless_project.rs | 2 + 6 files changed, 181 insertions(+), 32 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 7eebc97b1d91e8..a0d4cbcf815dab 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -5349,7 +5349,7 @@ fn make_lsp_adapter_delegate( let http_client = project.client().http_client().clone(); 
project.lsp_store().update(cx, |lsp_store, cx| { Ok( - ProjectLspAdapterDelegate::new(lsp_store, &worktree, http_client, fs, cx) + ProjectLspAdapterDelegate::new(lsp_store, &worktree, http_client, fs, None, cx) as Arc, ) }) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 6424da8a54280e..cd39490d0bbdf4 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1651,6 +1651,14 @@ impl LspAdapter for FakeLspAdapter { LanguageServerName(self.name.into()) } + async fn check_if_user_installed( + &self, + _: &dyn LspAdapterDelegate, + _: &AsyncAppContext, + ) -> Option { + Some(self.language_server_binary.clone()) + } + fn get_language_server_command<'a>( self: Arc, _: Arc, diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index b218ac58042f11..3b6b9ebb0a636d 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -442,6 +442,17 @@ impl LspStore { } } + fn worktree_for_id( + &self, + worktree_id: WorktreeId, + cx: &ModelContext, + ) -> Result> { + self.worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .ok_or_else(|| anyhow!("worktree not found")) + } + fn on_buffer_store_event( &mut self, _: Model, @@ -4287,6 +4298,7 @@ impl LspStore { .ok_or_else(|| anyhow!("missing language"))?; let language_name = LanguageName::from_proto(language.name); let matcher: LanguageMatcher = serde_json::from_str(&language.matcher)?; + this.update(&mut cx, |this, cx| { this.languages .register_language(language_name.clone(), None, matcher.clone(), { @@ -4334,6 +4346,47 @@ impl LspStore { Ok(proto::Ack {}) } + pub async fn handle_which_command( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let command = PathBuf::from(envelope.payload.command); + let response = this + .update(&mut cx, |this, cx| { + let worktree = 
this.worktree_for_id(worktree_id, cx)?; + let delegate = ProjectLspAdapterDelegate::for_local(this, &worktree, cx); + anyhow::Ok( + cx.spawn(|_, _| async move { delegate.which(command.as_os_str()).await }), + ) + })?? + .await; + + Ok(proto::WhichCommandResponse { + path: response.map(|path| path.to_string_lossy().to_string()), + }) + } + + pub async fn handle_shell_env( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let response = this + .update(&mut cx, |this, cx| { + let worktree = this.worktree_for_id(worktree_id, cx)?; + let delegate = ProjectLspAdapterDelegate::for_local(this, &worktree, cx); + anyhow::Ok(cx.spawn(|_, _| async move { delegate.shell_env().await })) + })?? + .await; + + Ok(proto::ShellEnvResponse { + env: response.into_iter().collect(), + }) + } + async fn handle_apply_additional_edits_for_completion( this: Model, envelope: TypedEnvelope, @@ -4478,39 +4531,34 @@ impl LspStore { ) { let ssh = self.as_ssh().unwrap(); - let configured_binary = ProjectSettings::get( - Some(worktree.update(cx, |worktree, cx| worktree.settings_location(cx))), - cx, - ) - .lsp - .get(&adapter.name()) - .and_then(|c| c.binary.as_ref()) - .and_then(|config| { - if let Some(path) = &config.path { - Some((path.clone(), config.arguments.clone().unwrap_or_default())) - } else { - None - } - }); let delegate = - ProjectLspAdapterDelegate::for_ssh(self, worktree, cx) as Arc; + ProjectLspAdapterDelegate::for_ssh(self, worktree, ssh.upstream_client.clone(), cx) + as Arc; + + // TODO: We should use `adapter` here instead of reaching through the `CachedLspAdapter`. 
+ let lsp_adapter = adapter.adapter.clone(); + let project_id = self.project_id; let worktree_id = worktree.read(cx).id().to_proto(); let upstream_client = ssh.upstream_client.clone(); let name = adapter.name().to_string(); - let Some((path, arguments)) = configured_binary else { - cx.emit(LspStoreEvent::Notification(format!( - "ssh-remoting currently requires manually configuring {} in your settings", - adapter.name() - ))); - return; - }; + let Some(available_language) = self.languages.available_language_for_name(&language) else { log::error!("failed to find available language {language}"); return; }; - let task = cx.spawn(|_, _| async move { - let delegate = delegate; + + let task = cx.spawn(|_, cx| async move { + let user_binary_task = lsp_adapter.check_if_user_installed(delegate.as_ref(), &cx); + let binary = match user_binary_task.await { + Some(binary) => binary, + None => { + return Err(anyhow!( + "Downloading language server for ssh host is not supported yet" + )) + } + }; + let name = adapter.name().to_string(); let code_action_kinds = adapter .adapter @@ -4523,12 +4571,22 @@ impl LspStore { .map(|options| serde_json::to_string(&options)) .transpose()?; + let language_server_command = proto::LanguageServerCommand { + path: binary.path.to_string_lossy().to_string(), + arguments: binary + .arguments + .iter() + .map(|args| args.to_string_lossy().to_string()) + .collect(), + env: binary.env.unwrap_or_default().into_iter().collect(), + }; + upstream_client .request(proto::CreateLanguageServer { project_id, worktree_id, name, - binary: Some(proto::LanguageServerCommand { path, arguments }), + binary: Some(language_server_command), initialization_options, code_action_kinds, language: Some(proto::AvailableLanguage { @@ -6890,6 +6948,7 @@ pub struct ProjectLspAdapterDelegate { http_client: Arc, language_registry: Arc, load_shell_env_task: Shared>>>, + upstream_client: Option, } impl ProjectLspAdapterDelegate { @@ -6907,15 +6966,30 @@ impl 
ProjectLspAdapterDelegate { .clone() .unwrap_or_else(|| Arc::new(BlockedHttpClient)); - Self::new(lsp_store, worktree, http_client, Some(local.fs.clone()), cx) + Self::new( + lsp_store, + worktree, + http_client, + Some(local.fs.clone()), + None, + cx, + ) } fn for_ssh( lsp_store: &LspStore, worktree: &Model, + upstream_client: AnyProtoClient, cx: &mut ModelContext, ) -> Arc { - Self::new(lsp_store, worktree, Arc::new(BlockedHttpClient), None, cx) + Self::new( + lsp_store, + worktree, + Arc::new(BlockedHttpClient), + None, + Some(upstream_client), + cx, + ) } pub fn new( @@ -6923,6 +6997,7 @@ impl ProjectLspAdapterDelegate { worktree: &Model, http_client: Arc, fs: Option>, + upstream_client: Option, cx: &mut ModelContext, ) -> Arc { let worktree_id = worktree.read(cx).id(); @@ -6942,6 +7017,7 @@ impl ProjectLspAdapterDelegate { worktree: worktree.read(cx).snapshot(), fs, http_client, + upstream_client, language_registry: lsp_store.languages.clone(), load_shell_env_task, }) @@ -6991,13 +7067,42 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { } async fn shell_env(&self) -> HashMap { + if let Some(upstream_client) = &self.upstream_client { + use rpc::proto::SSH_PROJECT_ID; + + return upstream_client + .request(proto::ShellEnv { + project_id: SSH_PROJECT_ID, + worktree_id: self.worktree_id().to_proto(), + }) + .await + .map(|response| response.env.into_iter().collect()) + .unwrap_or_default(); + } + let task = self.load_shell_env_task.clone(); task.await.unwrap_or_default() } #[cfg(not(target_os = "windows"))] async fn which(&self, command: &OsStr) -> Option { + if let Some(upstream_client) = &self.upstream_client { + use rpc::proto::SSH_PROJECT_ID; + + return upstream_client + .request(proto::WhichCommand { + project_id: SSH_PROJECT_ID, + worktree_id: self.worktree_id().to_proto(), + command: command.to_string_lossy().to_string(), + }) + .await + .log_err() + .and_then(|response| response.path) + .map(PathBuf::from); + } + self.fs.as_ref()?; + let 
worktree_abs_path = self.worktree.abs_path(); let shell_path = self.shell_env().await.get("PATH").cloned(); which::which_in(command, shell_path.as_ref(), worktree_abs_path).ok() diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index b24d9399655b11..e5d767fffb3fae 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -283,7 +283,13 @@ message Envelope { CloseBuffer close_buffer = 245; UpdateUserSettings update_user_settings = 246; - CreateLanguageServer create_language_server = 247; // current max + CreateLanguageServer create_language_server = 247; + + WhichCommand which_command = 248; + WhichCommandResponse which_command_response = 249; + + ShellEnv shell_env = 250; + ShellEnvResponse shell_env_response = 251; // current max } reserved 158 to 161; @@ -2503,6 +2509,7 @@ message UpdateUserSettings { message LanguageServerCommand { string path = 1; repeated string arguments = 2; + map env = 3; } message AvailableLanguage { @@ -2522,6 +2529,25 @@ message CreateLanguageServer { AvailableLanguage language = 7; } +message WhichCommand { + uint64 project_id = 1; + uint64 worktree_id = 2; + string command = 3; +} + +message WhichCommandResponse { + optional string path = 1; +} + +message ShellEnv { + uint64 project_id = 1; + uint64 worktree_id = 2; +} + +message ShellEnvResponse { + map env = 1; +} + // message RestartLanguageServer { // } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 44cb91db1080b8..7af66a6a6be093 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -367,7 +367,11 @@ messages!( (FindSearchCandidatesResponse, Background), (CloseBuffer, Foreground), (UpdateUserSettings, Foreground), - (CreateLanguageServer, Foreground) + (CreateLanguageServer, Foreground), + (WhichCommand, Foreground), + (WhichCommandResponse, Foreground), + (ShellEnv, Foreground), + (ShellEnvResponse, Foreground), ); request_messages!( @@ -491,7 +495,9 @@ request_messages!( 
(SynchronizeContexts, SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (AddWorktree, AddWorktreeResponse), - (CreateLanguageServer, Ack) + (CreateLanguageServer, Ack), + (WhichCommand, WhichCommandResponse), + (ShellEnv, ShellEnvResponse) ); entity_messages!( @@ -565,7 +571,9 @@ entity_messages!( SynchronizeContexts, LspExtSwitchSourceHeader, UpdateUserSettings, - CreateLanguageServer + CreateLanguageServer, + WhichCommand, + ShellEnv ); entity_messages!( diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index ca5fe06e137c7f..e654e2a190ca73 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -89,6 +89,8 @@ impl HeadlessProject { client.add_model_message_handler(BufferStore::handle_close_buffer); client.add_model_request_handler(LspStore::handle_create_language_server); + client.add_model_request_handler(LspStore::handle_which_command); + client.add_model_request_handler(LspStore::handle_shell_env); BufferStore::init(&client); WorktreeStore::init(&client); From 3cea7ccbff438c331ce6e11cd9de78a7c3b1e1b0 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 17:26:06 -0400 Subject: [PATCH 032/762] pane: Fix pinned tabs being persisted after closing (#17666) Release Notes: - Fixed tabs staying pinned after closing unrelated tabs --- crates/workspace/src/pane.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index e2a77402de70a7..23148d6376f456 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1364,6 +1364,9 @@ impl Pane { self.activation_history .retain(|entry| entry.entity_id != self.items[item_index].item_id()); + if self.is_tab_pinned(item_index) { + self.pinned_tab_count -= 1; + } if item_index == self.active_item_index { let index_to_activate = self 
.activation_history From 3ff81c2e864d3bdbc576a099c055478b5ad7f0e6 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 17:37:26 -0400 Subject: [PATCH 033/762] assistant: Simplify image insertion (#17668) This PR simplifies how images are inserted into the context editor. We don't need to hold the `images` in a `HashMap` on the `Context`, as we were only inserting them to pull them out again. Release Notes: - N/A --- crates/assistant/src/assistant_panel.rs | 19 ++++++++++-- crates/assistant/src/context.rs | 39 ++----------------------- 2 files changed, 19 insertions(+), 39 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index a0d4cbcf815dab..634f2231cd4f51 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -34,6 +34,7 @@ use editor::{ }; use editor::{display_map::CreaseId, FoldPlaceholder}; use fs::Fs; +use futures::FutureExt; use gpui::{ canvas, div, img, percentage, point, pulsating_between, size, Action, Animation, AnimationExt, AnyElement, AnyView, AppContext, AsyncWindowContext, ClipboardEntry, ClipboardItem, @@ -46,11 +47,11 @@ use indexed_docs::IndexedDocsStore; use language::{ language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, ToOffset, }; -use language_model::LanguageModelToolUse; use language_model::{ provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, Role, }; +use language_model::{LanguageModelImage, LanguageModelToolUse}; use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; use project::lsp_store::ProjectLspAdapterDelegate; @@ -3551,10 +3552,22 @@ impl ContextEditor { self.context.update(cx, |context, cx| { for image in images { + let Some(render_image) = image.to_image_data(cx).log_err() else { + continue; + }; let image_id = image.id(); - context.insert_image(image, cx); + let image_task = 
LanguageModelImage::from_image(image, cx).shared(); + for image_position in image_positions.iter() { - context.insert_image_content(image_id, image_position.text_anchor, cx); + context.insert_content( + Content::Image { + anchor: image_position.text_anchor, + image_id, + image: image_task.clone(), + render_image: render_image.clone(), + }, + cx, + ); } } }); diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index e43ec203e96752..7702207d8d2ad9 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -20,8 +20,8 @@ use futures::{ FutureExt, StreamExt, }; use gpui::{ - AppContext, AsyncAppContext, Context as _, EventEmitter, Image, Model, ModelContext, - RenderImage, SharedString, Subscription, Task, + AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, RenderImage, + SharedString, Subscription, Task, }; use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset}; @@ -38,7 +38,6 @@ use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use std::{ cmp::{self, max, Ordering}, - collections::hash_map, fmt::Debug, iter, mem, ops::Range, @@ -49,7 +48,7 @@ use std::{ }; use telemetry_events::AssistantKind; use text::BufferSnapshot; -use util::{post_inc, ResultExt, TryFutureExt}; +use util::{post_inc, TryFutureExt}; use uuid::Uuid; #[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)] @@ -468,7 +467,6 @@ pub struct Context { slash_command_output_sections: Vec>, pending_tool_uses_by_id: HashMap, PendingToolUse>, message_anchors: Vec, - images: HashMap, Shared>>)>, contents: Vec, messages_metadata: HashMap, summary: Option, @@ -564,7 +562,6 @@ impl Context { operations: Vec::new(), message_anchors: Default::default(), contents: Default::default(), - images: Default::default(), messages_metadata: Default::default(), pending_slash_commands: Vec::new(), finished_slash_commands: HashSet::default(), @@ -2374,36 +2371,6 @@ impl 
Context { } } - pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext) -> Option<()> { - if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) { - entry.insert(( - image.to_image_data(cx).log_err()?, - LanguageModelImage::from_image(image, cx).shared(), - )); - } - - Some(()) - } - - pub fn insert_image_content( - &mut self, - image_id: u64, - anchor: language::Anchor, - cx: &mut ModelContext, - ) { - if let Some((render_image, image)) = self.images.get(&image_id) { - self.insert_content( - Content::Image { - anchor, - image_id, - image: image.clone(), - render_image: render_image.clone(), - }, - cx, - ); - } - } - pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext) { let buffer = self.buffer.read(cx); let insertion_ix = match self From b16af138e20d054d7bc4a17be51420dac2bb26f7 Mon Sep 17 00:00:00 2001 From: Fernando Tagawa Date: Tue, 10 Sep 2024 18:41:37 -0300 Subject: [PATCH 034/762] php: Add auto-indent (#17545) Release Notes: - N/A --- extensions/php/languages/php/indents.scm | 1 + 1 file changed, 1 insertion(+) create mode 100644 extensions/php/languages/php/indents.scm diff --git a/extensions/php/languages/php/indents.scm b/extensions/php/languages/php/indents.scm new file mode 100644 index 00000000000000..e9754690920500 --- /dev/null +++ b/extensions/php/languages/php/indents.scm @@ -0,0 +1 @@ +(_ "{" "}" @end) @indent From d6663fcb29fcc477e4e0a05115561690ad1b7854 Mon Sep 17 00:00:00 2001 From: maan2003 <49202620+maan2003@users.noreply.github.com> Date: Wed, 11 Sep 2024 03:39:00 +0530 Subject: [PATCH 035/762] Pass temperature to Anthropic (#17509) Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- crates/assistant/src/inline_assistant.rs | 15 +-------------- crates/language_model/src/request.rs | 2 +- 2 files changed, 2 insertions(+), 15 deletions(-) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 051db0f2476bf3..246a408477bb6e 100644 --- 
a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -2373,19 +2373,6 @@ impl Codegen { None }; - // Higher Temperature increases the randomness of model outputs. - // If Markdown or No Language is Known, increase the randomness for more creative output - // If Code, decrease temperature to get more deterministic outputs - let temperature = if let Some(language) = language_name.clone() { - if language == "Markdown".into() { - 1.0 - } else { - 0.5 - } - } else { - 1.0 - }; - let language_name = language_name.as_ref(); let start = buffer.point_to_buffer_offset(edit_range.start); let end = buffer.point_to_buffer_offset(edit_range.end); @@ -2421,7 +2408,7 @@ impl Codegen { messages, tools: Vec::new(), stop: vec!["|END|>".to_string()], - temperature, + temperature: 1., }) } diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 7549785b3bdaad..4162e9df87037a 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -399,7 +399,7 @@ impl LanguageModelRequest { tool_choice: None, metadata: None, stop_sequences: Vec::new(), - temperature: None, + temperature: Some(self.temperature), top_k: None, top_p: None, } From d1a47faeb7295a4ec450ad26191e8a65047805c9 Mon Sep 17 00:00:00 2001 From: Bedis Nbiba Date: Tue, 10 Sep 2024 23:26:11 +0100 Subject: [PATCH 036/762] docs: Update Deno docs (#17579) add the minimal configuration needed to make deno lsp work Release Notes: - N/A --- docs/src/languages/deno.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/src/languages/deno.md b/docs/src/languages/deno.md index 3b5e4a1ff9d746..5b92acfeeb049b 100644 --- a/docs/src/languages/deno.md +++ b/docs/src/languages/deno.md @@ -10,6 +10,15 @@ To use the Deno Language Server with TypeScript and TSX files, you will likely w ```json { + "lsp": { + "deno": { + "settings": { + "deno": { + "enable": true + } + } + } + }, "languages": { "TypeScript": { 
"language_servers": [ From f374038da0e9dbe81e29b42df29f6c42ab39c378 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 19:19:21 -0400 Subject: [PATCH 037/762] pane: Serialize pinned tab state (#17670) Release Notes: - Tab pin state is now persisted across Zed runs. --- crates/workspace/src/pane.rs | 8 ++++++ crates/workspace/src/persistence.rs | 34 ++++++++++++++++++----- crates/workspace/src/persistence/model.rs | 13 +++++++-- crates/workspace/src/workspace.rs | 5 ++-- 4 files changed, 49 insertions(+), 11 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 23148d6376f456..cc752f7aec9ff6 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -715,6 +715,14 @@ impl Pane { } } + pub(crate) fn set_pinned_count(&mut self, count: usize) { + self.pinned_tab_count = count; + } + + pub(crate) fn pinned_count(&self) -> usize { + self.pinned_tab_count + } + pub fn handle_item_edit(&mut self, item_id: EntityId, cx: &AppContext) { if let Some(preview_item) = self.preview_item() { if preview_item.item_id() == item_id && !preview_item.preserve_preview(cx) { diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index d035b35c1079e1..88ede4228d622d 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -13,7 +13,7 @@ use sqlez::{ }; use ui::px; -use util::ResultExt; +use util::{maybe, ResultExt}; use uuid::Uuid; use crate::WorkspaceId; @@ -352,6 +352,9 @@ define_connection! 
{ sql!( ALTER TABLE workspaces ADD COLUMN window_id INTEGER DEFAULT NULL; ), + sql!( + ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0; + ) ]; } @@ -846,6 +849,7 @@ impl WorkspaceDb { SerializedPaneGroup::Pane(SerializedPane { active: true, children: vec![], + pinned_count: 0, }) })) } @@ -861,15 +865,17 @@ impl WorkspaceDb { Option, Option, Option, + Option, Option, ); self.select_bound::(sql!( - SELECT group_id, axis, pane_id, active, flexes + SELECT group_id, axis, pane_id, active, pinned_count, flexes FROM (SELECT group_id, axis, NULL as pane_id, NULL as active, + NULL as pinned_count, position, parent_group_id, workspace_id, @@ -881,6 +887,7 @@ impl WorkspaceDb { NULL, center_panes.pane_id, panes.active as active, + pinned_count, position, parent_group_id, panes.workspace_id as workspace_id, @@ -891,7 +898,8 @@ impl WorkspaceDb { ORDER BY position ))?((group_id, workspace_id))? .into_iter() - .map(|(group_id, axis, pane_id, active, flexes)| { + .map(|(group_id, axis, pane_id, active, pinned_count, flexes)| { + let maybe_pane = maybe!({ Some((pane_id?, active?, pinned_count?)) }); if let Some((group_id, axis)) = group_id.zip(axis) { let flexes = flexes .map(|flexes: String| serde_json::from_str::>(&flexes)) @@ -902,10 +910,11 @@ impl WorkspaceDb { children: self.get_pane_group(workspace_id, Some(group_id))?, flexes, }) - } else if let Some((pane_id, active)) = pane_id.zip(active) { + } else if let Some((pane_id, active, pinned_count)) = maybe_pane { Ok(SerializedPaneGroup::Pane(SerializedPane::new( self.get_items(pane_id)?, active, + pinned_count, ))) } else { bail!("Pane Group Child was neither a pane group or a pane"); @@ -977,10 +986,10 @@ impl WorkspaceDb { parent: Option<(GroupId, usize)>, ) -> Result { let pane_id = conn.select_row_bound::<_, i64>(sql!( - INSERT INTO panes(workspace_id, active) - VALUES (?, ?) + INSERT INTO panes(workspace_id, active, pinned_count) + VALUES (?, ?, ?) RETURNING pane_id - ))?((workspace_id, pane.active))? 
+ ))?((workspace_id, pane.active, pane.pinned_count))? .ok_or_else(|| anyhow!("Could not retrieve inserted pane_id"))?; let (parent_id, order) = parent.unzip(); @@ -1219,6 +1228,7 @@ mod tests { SerializedItem::new("Terminal", 6, true, false), ], false, + 0, )), SerializedPaneGroup::Pane(SerializedPane::new( vec![ @@ -1226,6 +1236,7 @@ mod tests { SerializedItem::new("Terminal", 8, false, false), ], false, + 0, )), ], ), @@ -1235,6 +1246,7 @@ mod tests { SerializedItem::new("Terminal", 10, true, false), ], false, + 0, )), ], ); @@ -1523,6 +1535,7 @@ mod tests { SerializedItem::new("Terminal", 2, true, false), ], false, + 0, )), SerializedPaneGroup::Pane(SerializedPane::new( vec![ @@ -1530,6 +1543,7 @@ mod tests { SerializedItem::new("Terminal", 3, true, false), ], true, + 0, )), ], ), @@ -1539,6 +1553,7 @@ mod tests { SerializedItem::new("Terminal", 6, false, false), ], false, + 0, )), ], ); @@ -1570,6 +1585,7 @@ mod tests { SerializedItem::new("Terminal", 2, true, false), ], false, + 0, )), SerializedPaneGroup::Pane(SerializedPane::new( vec![ @@ -1577,6 +1593,7 @@ mod tests { SerializedItem::new("Terminal", 3, true, false), ], true, + 0, )), ], ), @@ -1586,6 +1603,7 @@ mod tests { SerializedItem::new("Terminal", 6, true, false), ], false, + 0, )), ], ); @@ -1605,6 +1623,7 @@ mod tests { SerializedItem::new("Terminal", 2, true, false), ], false, + 0, )), SerializedPaneGroup::Pane(SerializedPane::new( vec![ @@ -1612,6 +1631,7 @@ mod tests { SerializedItem::new("Terminal", 3, false, false), ], true, + 0, )), ], ); diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 8b6d66f3cb4d6e..d6f8001f25fc3e 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -297,6 +297,7 @@ impl Default for SerializedPaneGroup { Self::Pane(SerializedPane { children: vec![SerializedItem::default()], active: false, + pinned_count: 0, }) } } @@ -379,11 +380,16 @@ impl SerializedPaneGroup { 
pub struct SerializedPane { pub(crate) active: bool, pub(crate) children: Vec, + pub(crate) pinned_count: usize, } impl SerializedPane { - pub fn new(children: Vec, active: bool) -> Self { - SerializedPane { children, active } + pub fn new(children: Vec, active: bool, pinned_count: usize) -> Self { + SerializedPane { + children, + active, + pinned_count, + } } pub async fn deserialize_to( @@ -442,6 +448,9 @@ impl SerializedPane { } })?; } + pane.update(cx, |pane, _| { + pane.set_pinned_count(self.pinned_count); + })?; anyhow::Ok(items) } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 219f75624c8ffd..7371e56cee2376 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -4025,7 +4025,7 @@ impl Workspace { }; fn serialize_pane_handle(pane_handle: &View, cx: &WindowContext) -> SerializedPane { - let (items, active) = { + let (items, active, pinned_count) = { let pane = pane_handle.read(cx); let active_item_id = pane.active_item().map(|item| item.item_id()); ( @@ -4042,10 +4042,11 @@ impl Workspace { }) .collect::>(), pane.has_focus(cx), + pane.pinned_count(), ) }; - SerializedPane::new(items, active) + SerializedPane::new(items, active, pinned_count) } fn build_serialized_pane_group( From 8f284456126d53b69d639b159a33effa57bfa8cd Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 22:57:44 -0400 Subject: [PATCH 038/762] php: Bump to v0.2.0 (#17674) This PR bumps the PHP extension to v0.2.0. 
Changes: - https://github.com/zed-industries/zed/pull/16720 - https://github.com/zed-industries/zed/pull/16955 - https://github.com/zed-industries/zed/pull/17243 - https://github.com/zed-industries/zed/pull/17545 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/php/Cargo.toml | 2 +- extensions/php/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f9d3240d682a93..07b97e030df7bc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14441,7 +14441,7 @@ dependencies = [ [[package]] name = "zed_php" -version = "0.1.3" +version = "0.2.0" dependencies = [ "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/extensions/php/Cargo.toml b/extensions/php/Cargo.toml index 91f309d9de07ad..d4a8f36cbdf83e 100644 --- a/extensions/php/Cargo.toml +++ b/extensions/php/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_php" -version = "0.1.3" +version = "0.2.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/php/extension.toml b/extensions/php/extension.toml index cb30eb5f0cd55f..92bd7ed85bc465 100644 --- a/extensions/php/extension.toml +++ b/extensions/php/extension.toml @@ -1,7 +1,7 @@ id = "php" name = "PHP" description = "PHP support." 
-version = "0.1.3" +version = "0.2.0" schema_version = 1 authors = ["Piotr Osiewicz "] repository = "https://github.com/zed-industries/zed" From ccf6f27b8f1bdfb803b9cc0da0b0cf5c9e136dd9 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Tue, 10 Sep 2024 22:59:10 -0400 Subject: [PATCH 039/762] settings: Remove auxiliary Content types where possible (#16744) Release Notes: - N/A --- crates/auto_update/src/auto_update.rs | 23 +- crates/call/src/call_settings.rs | 22 +- crates/client/src/client.rs | 50 ++- crates/collab/src/tests/editor_tests.rs | 8 +- crates/collab/src/tests/following_tests.rs | 2 +- crates/collab_ui/src/chat_panel.rs | 2 +- .../src/chat_panel/message_editor.rs | 8 +- crates/collab_ui/src/collab_panel.rs | 2 +- crates/collab_ui/src/notification_panel.rs | 2 +- crates/collab_ui/src/panel_settings.rs | 74 ++-- .../src/project_diagnostics_settings.rs | 20 +- crates/editor/src/editor.rs | 4 +- crates/editor/src/editor_settings.rs | 318 +++++++----------- crates/editor/src/editor_settings_controls.rs | 22 +- crates/editor/src/editor_tests.rs | 18 +- crates/editor/src/element.rs | 17 +- crates/extension/src/extension_settings.rs | 13 +- crates/extensions_ui/src/extensions_ui.rs | 2 +- crates/go_to_line/src/cursor_position.rs | 16 +- crates/gpui/src/geometry.rs | 2 + crates/language/src/language_settings.rs | 8 +- crates/languages/src/json.rs | 29 +- crates/outline_panel/src/outline_panel.rs | 16 +- .../src/outline_panel_settings.rs | 72 ++-- crates/performance/src/performance.rs | 184 ++++++++++ crates/project/src/project_settings.rs | 41 +-- crates/project_panel/src/project_panel.rs | 28 +- .../src/project_panel_settings.rs | 92 ++--- crates/recent_projects/src/dev_servers.rs | 3 +- crates/recent_projects/src/ssh_connections.rs | 25 +- crates/repl/src/jupyter_settings.rs | 28 +- crates/tasks_ui/src/settings.rs | 18 +- crates/vim/src/digraph.rs | 2 +- crates/vim/src/normal.rs | 6 +- 
crates/vim/src/normal/paste.rs | 12 +- crates/vim/src/normal/scroll.rs | 2 +- crates/vim/src/normal/search.rs | 4 +- crates/vim/src/test.rs | 2 +- crates/vim/src/test/vim_test_context.rs | 6 +- crates/vim/src/vim.rs | 32 +- crates/welcome/src/base_keymap_picker.rs | 2 +- crates/welcome/src/base_keymap_setting.rs | 6 +- crates/welcome/src/welcome.rs | 2 +- crates/workspace/src/item.rs | 70 ++-- crates/workspace/src/workspace.rs | 8 +- crates/workspace/src/workspace_settings.rs | 132 ++++---- crates/worktree/src/worktree_settings.rs | 43 ++- crates/worktree/src/worktree_tests.rs | 11 +- crates/zed/src/zed.rs | 2 +- 49 files changed, 829 insertions(+), 682 deletions(-) create mode 100644 crates/performance/src/performance.rs diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 8063ff4c40fca3..499df7fc298594 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -116,27 +116,30 @@ impl Drop for MacOsUnmounter { } } -struct AutoUpdateSetting(bool); - /// Whether or not to automatically check for updates. 
-/// -/// Default: true -#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)] +#[derive(Clone, Copy, JsonSchema, Deserialize, Serialize)] +#[serde(default)] #[serde(transparent)] -struct AutoUpdateSettingContent(bool); +struct AutoUpdateSetting(bool); + +impl Default for AutoUpdateSetting { + fn default() -> Self { + Self(true) + } +} impl Settings for AutoUpdateSetting { const KEY: Option<&'static str> = Some("auto_update"); - type FileContent = Option; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { let auto_update = [sources.release_channel, sources.user] .into_iter() - .find_map(|value| value.copied().flatten()) - .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); + .find_map(|value| value.copied()) + .unwrap_or(*sources.default); - Ok(Self(auto_update.0)) + Ok(auto_update) } } diff --git a/crates/call/src/call_settings.rs b/crates/call/src/call_settings.rs index 446178ffb982d0..e10b711734bb6d 100644 --- a/crates/call/src/call_settings.rs +++ b/crates/call/src/call_settings.rs @@ -4,30 +4,20 @@ use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Deserialize, Debug)] -pub struct CallSettings { - pub mute_on_join: bool, - pub share_on_join: bool, -} - /// Configuration of voice calls in Zed. -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct CallSettingsContent { +#[derive(Clone, Debug, Default, Deserialize, Serialize, JsonSchema)] +#[serde(default)] +pub struct CallSettings { /// Whether the microphone should be muted when joining a channel or a call. - /// - /// Default: false - pub mute_on_join: Option, - + pub mute_on_join: bool, /// Whether your current project should be shared when joining an empty channel. 
- /// - /// Default: true - pub share_on_join: Option, + pub share_on_join: bool, } impl Settings for CallSettings { const KEY: Option<&'static str> = Some("calls"); - type FileContent = CallSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 8787e2ed9675fe..83eef45be802bb 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -99,20 +99,26 @@ pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(20); actions!(client, [SignIn, SignOut, Reconnect]); -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct ClientSettingsContent { - server_url: Option, -} - -#[derive(Deserialize)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct ClientSettings { + /// The server to connect to. If the environment variable + /// ZED_SERVER_URL is set, it will override this setting. pub server_url: String, } +impl Default for ClientSettings { + fn default() -> Self { + Self { + server_url: "https://zed.dev".to_owned(), + } + } +} + impl Settings for ClientSettings { const KEY: Option<&'static str> = None; - type FileContent = ClientSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { let mut result = sources.json_merge::()?; @@ -124,19 +130,37 @@ impl Settings for ClientSettings { } #[derive(Default, Clone, Serialize, Deserialize, JsonSchema)] -pub struct ProxySettingsContent { - proxy: Option, -} - -#[derive(Deserialize, Default)] +#[serde(default)] pub struct ProxySettings { + /// Set a proxy to use. The proxy protocol is specified by the URI scheme. + /// + /// Supported URI scheme: `http`, `https`, `socks4`, `socks4a`, `socks5`, + /// `socks5h`. `http` will be used when no scheme is specified. 
+ /// + /// By default no proxy will be used, or Zed will try get proxy settings from + /// environment variables. + /// + /// Examples: + /// - "proxy": "socks5://localhost:10808" + /// - "proxy": "http://127.0.0.1:10809" + #[schemars(example = "Self::example_1")] + #[schemars(example = "Self::example_2")] pub proxy: Option, } +impl ProxySettings { + fn example_1() -> String { + "http://127.0.0.1:10809".to_owned() + } + fn example_2() -> String { + "socks5://localhost:10808".to_owned() + } +} + impl Settings for ProxySettings { const KEY: Option<&'static str> = None; - type FileContent = ProxySettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { Ok(Self { diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 3f205b7f937c96..a214291752a5ef 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -2261,11 +2261,11 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA cx_a.update(editor::init); cx_b.update(editor::init); // Turn inline-blame-off by default so no state is transferred without us explicitly doing so - let inline_blame_off_settings = Some(InlineBlameSettings { + let inline_blame_off_settings = InlineBlameSettings { enabled: false, - delay_ms: None, - min_column: None, - }); + delay_ms: 0, + min_column: 0, + }; cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings::(cx, |settings| { diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index e66b66a1b45893..1bc3cd691778d1 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -1649,7 +1649,7 @@ async fn test_following_into_excluded_file( cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |settings| { - settings.file_scan_exclusions = 
Some(vec!["**/.git".to_string()]); + settings.file_scan_exclusions = vec!["**/.git".to_string()]; }); }); }); diff --git a/crates/collab_ui/src/chat_panel.rs b/crates/collab_ui/src/chat_panel.rs index 5a79f364ff571f..f6e6c7321ff80d 100644 --- a/crates/collab_ui/src/chat_panel.rs +++ b/crates/collab_ui/src/chat_panel.rs @@ -1108,7 +1108,7 @@ impl Panel for ChatPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = Some(position), + move |settings, _| settings.dock = position, ); } diff --git a/crates/collab_ui/src/chat_panel/message_editor.rs b/crates/collab_ui/src/chat_panel/message_editor.rs index 028e148cbac039..0b1a2dbe69ec0b 100644 --- a/crates/collab_ui/src/chat_panel/message_editor.rs +++ b/crates/collab_ui/src/chat_panel/message_editor.rs @@ -113,9 +113,7 @@ impl MessageEditor { editor.set_show_indent_guides(false, cx); editor.set_completion_provider(Box::new(MessageEditorCompletionProvider(this))); editor.set_auto_replace_emoji_shortcode( - MessageEditorSettings::get_global(cx) - .auto_replace_emoji_shortcode - .unwrap_or_default(), + MessageEditorSettings::get_global(cx).auto_replace_emoji_shortcode, ); }); @@ -130,9 +128,7 @@ impl MessageEditor { cx.observe_global::(|view, cx| { view.editor.update(cx, |editor, cx| { editor.set_auto_replace_emoji_shortcode( - MessageEditorSettings::get_global(cx) - .auto_replace_emoji_shortcode - .unwrap_or_default(), + MessageEditorSettings::get_global(cx).auto_replace_emoji_shortcode, ) }) }) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 72701101816995..3e6483c42dd726 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2813,7 +2813,7 @@ impl Panel for CollabPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = Some(position), + move |settings, _| settings.dock = position, ); } diff --git a/crates/collab_ui/src/notification_panel.rs 
b/crates/collab_ui/src/notification_panel.rs index 33ca5a2952180c..326e1f0f5bdc2b 100644 --- a/crates/collab_ui/src/notification_panel.rs +++ b/crates/collab_ui/src/notification_panel.rs @@ -672,7 +672,7 @@ impl Panel for NotificationPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = Some(position), + move |settings, _| settings.dock = position, ); } diff --git a/crates/collab_ui/src/panel_settings.rs b/crates/collab_ui/src/panel_settings.rs index f9851d5797306c..a594f023bbcd3e 100644 --- a/crates/collab_ui/src/panel_settings.rs +++ b/crates/collab_ui/src/panel_settings.rs @@ -2,58 +2,84 @@ use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; +use ui::px; use workspace::dock::DockPosition; -#[derive(Deserialize, Debug)] +#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] +#[serde(default)] pub struct CollaborationPanelSettings { + /// Whether to show the panel button in the status bar. pub button: bool, + /// Where to dock the panel. pub dock: DockPosition, + /// Default width of the panel in pixels. pub default_width: Pixels, } -#[derive(Deserialize, Debug)] +impl Default for CollaborationPanelSettings { + fn default() -> Self { + Self { + button: true, + dock: DockPosition::Left, + default_width: px(240.), + } + } +} + +#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] +#[serde(default)] pub struct ChatPanelSettings { + /// Whether to show the panel button in the status bar. pub button: bool, + /// Where to dock the panel. pub dock: DockPosition, + /// Default width of the panel in pixels. 
pub default_width: Pixels, } -#[derive(Deserialize, Debug)] +impl Default for ChatPanelSettings { + fn default() -> Self { + Self { + button: true, + dock: DockPosition::Right, + default_width: px(240.), + } + } +} + +#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] +#[serde(default)] pub struct NotificationPanelSettings { + /// Whether to show the panel button in the status bar. pub button: bool, + /// Where to dock the panel. pub dock: DockPosition, + /// Default width of the panel in pixels. pub default_width: Pixels, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct PanelSettingsContent { - /// Whether to show the panel button in the status bar. - /// - /// Default: true - pub button: Option, - /// Where to dock the panel. - /// - /// Default: left - pub dock: Option, - /// Default width of the panel in pixels. - /// - /// Default: 240 - pub default_width: Option, +impl Default for NotificationPanelSettings { + fn default() -> Self { + Self { + button: true, + dock: DockPosition::Right, + default_width: px(380.), + } + } } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +#[serde(default)] pub struct MessageEditorSettings { /// Whether to automatically replace emoji shortcodes with emoji characters. /// For example: typing `:wave:` gets replaced with `👋`. 
- /// - /// Default: false - pub auto_replace_emoji_shortcode: Option, + pub auto_replace_emoji_shortcode: bool, } impl Settings for CollaborationPanelSettings { const KEY: Option<&'static str> = Some("collaboration_panel"); - type FileContent = PanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, @@ -66,7 +92,7 @@ impl Settings for CollaborationPanelSettings { impl Settings for ChatPanelSettings { const KEY: Option<&'static str> = Some("chat_panel"); - type FileContent = PanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, @@ -79,7 +105,7 @@ impl Settings for ChatPanelSettings { impl Settings for NotificationPanelSettings { const KEY: Option<&'static str> = Some("notification_panel"); - type FileContent = PanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, @@ -92,7 +118,7 @@ impl Settings for NotificationPanelSettings { impl Settings for MessageEditorSettings { const KEY: Option<&'static str> = Some("message_editor"); - type FileContent = MessageEditorSettings; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/diagnostics/src/project_diagnostics_settings.rs b/crates/diagnostics/src/project_diagnostics_settings.rs index 55879d0c426e2b..34739bcd170adf 100644 --- a/crates/diagnostics/src/project_diagnostics_settings.rs +++ b/crates/diagnostics/src/project_diagnostics_settings.rs @@ -4,23 +4,25 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] +#[serde(default)] +/// Diagnostics configuration. pub struct ProjectDiagnosticsSettings { + /// Whether to show warnings or not by default. pub include_warnings: bool, } -/// Diagnostics configuration. 
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct ProjectDiagnosticsSettingsContent { - /// Whether to show warnings or not by default. - /// - /// Default: true - include_warnings: Option, +impl Default for ProjectDiagnosticsSettings { + fn default() -> Self { + Self { + include_warnings: true, + } + } } impl Settings for ProjectDiagnosticsSettings { const KEY: Option<&'static str> = Some("diagnostics"); - type FileContent = ProjectDiagnosticsSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 3466888c942de1..f750abd95c76df 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10639,7 +10639,7 @@ impl Editor { let fs = workspace.read(cx).app_state().fs.clone(); let current_show = TabBarSettings::get_global(cx).show; update_settings_file::(fs, cx, move |setting, _| { - setting.show = Some(!current_show); + setting.show = !current_show; }); } @@ -12562,7 +12562,7 @@ impl EditorSnapshot { let show_git_gutter = self.show_git_diff_gutter.unwrap_or_else(|| { matches!( ProjectSettings::get_global(cx).git.git_gutter, - Some(GitGutterSetting::TrackedFiles) + GitGutterSetting::TrackedFiles ) }); let gutter_settings = EditorSettings::get_global(cx).gutter; diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 2614e4ea303d24..0532fd7bdf880c 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -3,38 +3,105 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Deserialize, Clone)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct EditorSettings { + /// Whether the cursor blinks in the editor. pub cursor_blink: bool, + /// How to highlight the current line in the editor. 
pub current_line_highlight: CurrentLineHighlight, + /// Whether to show the informational hover box when moving the mouse + /// over symbols in the editor. pub hover_popover_enabled: bool, + /// Whether to pop the completions menu while typing in an editor without + /// explicitly requesting it. pub show_completions_on_input: bool, + /// Whether to display inline and alongside documentation for items in the + /// completions menu. pub show_completion_documentation: bool, + /// The debounce delay before re-querying the language server for completion + /// documentation when not included in original completion list. pub completion_documentation_secondary_query_debounce: u64, + /// Whether to use additional LSP queries to format (and amend) the code after + /// every "trigger" symbol input, defined by LSP server capabilities. pub use_on_type_format: bool, + /// Toolbar related settings pub toolbar: Toolbar, + /// Scrollbar related settings pub scrollbar: Scrollbar, + /// Gutter related settings pub gutter: Gutter, + /// Whether the editor will scroll beyond the last line. pub scroll_beyond_last_line: ScrollBeyondLastLine, + /// The number of lines to keep above/below the cursor when auto-scrolling. pub vertical_scroll_margin: f32, + /// Scroll sensitivity multiplier. This multiplier is applied + /// to both the horizontal and vertical delta values while scrolling. pub scroll_sensitivity: f32, + /// Whether the line numbers on editors gutter are relative or not. pub relative_line_numbers: bool, + /// When to populate a new search's query based on the text under the cursor. pub seed_search_query_from_cursor: SeedQuerySetting, pub use_smartcase_search: bool, + /// The key to use for adding multiple cursors pub multi_cursor_modifier: MultiCursorModifier, + /// Hide the values of variables in `private` files, as defined by the + /// private_files setting. 
This only changes the visual representation, + /// the values are still present in the file and can be selected / copied / pasted pub redact_private_values: bool, + + /// How many lines to expand the multibuffer excerpts by default pub expand_excerpt_lines: u32, pub middle_click_paste: bool, + /// What to do when multibuffer is double clicked in some of its excerpts + /// (parts of singleton buffers). #[serde(default)] pub double_click_in_multibuffer: DoubleClickInMultibuffer, + /// Whether the editor search results will loop pub search_wrap: bool, #[serde(default)] pub search: SearchSettings, + /// Show method signatures in the editor, when inside parentheses. pub auto_signature_help: bool, + /// Whether to show the signature help after completion or a bracket pair inserted. + /// If `auto_signature_help` is enabled, this setting will be treated as enabled also. pub show_signature_help_after_edits: bool, + /// Jupyter REPL settings. pub jupyter: Jupyter, } +impl Default for EditorSettings { + fn default() -> Self { + Self { + cursor_blink: true, + current_line_highlight: CurrentLineHighlight::All, + hover_popover_enabled: true, + show_completions_on_input: true, + show_completion_documentation: true, + completion_documentation_secondary_query_debounce: 300, + use_on_type_format: true, + toolbar: Default::default(), + scrollbar: Default::default(), + gutter: Default::default(), + scroll_beyond_last_line: ScrollBeyondLastLine::OnePage, + vertical_scroll_margin: 3., + scroll_sensitivity: 1.0, + relative_line_numbers: false, + seed_search_query_from_cursor: SeedQuerySetting::Always, + multi_cursor_modifier: MultiCursorModifier::Alt, + redact_private_values: false, + expand_excerpt_lines: 3, + double_click_in_multibuffer: DoubleClickInMultibuffer::Select, + search_wrap: true, + auto_signature_help: false, + show_signature_help_after_edits: true, + jupyter: Default::default(), + use_smartcase_search: false, + middle_click_paste: true, + search: 
SearchSettings::default(), + } + } +} #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CurrentLineHighlight { @@ -72,48 +139,93 @@ pub enum DoubleClickInMultibuffer { Open, } -#[derive(Debug, Clone, Deserialize)] +#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] pub struct Jupyter { /// Whether the Jupyter feature is enabled. - /// - /// Default: true pub enabled: bool, } -#[derive(Default, Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] -#[serde(rename_all = "snake_case")] -pub struct JupyterContent { - /// Whether the Jupyter feature is enabled. - /// - /// Default: true - pub enabled: Option, +impl Default for Jupyter { + fn default() -> Self { + Self { enabled: true } + } } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(default)] pub struct Toolbar { + /// Whether to display breadcrumbs in the editor toolbar. pub breadcrumbs: bool, + /// Whether to display quick action buttons in the editor toolbar. pub quick_actions: bool, + /// Whether to show the selections menu in the editor toolbar pub selections_menu: bool, } +impl Default for Toolbar { + fn default() -> Self { + Self { + breadcrumbs: true, + quick_actions: true, + selections_menu: true, + } + } +} + #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct Scrollbar { + /// When to show the scrollbar in the editor. pub show: ShowScrollbar, + /// Whether to show git diff indicators in the scrollbar. pub git_diff: bool, + /// Whether to show selected symbol occurrences in the scrollbar. pub selected_symbol: bool, + /// Whether to show buffer search result indicators in the scrollbar. pub search_results: bool, + /// Whether to show diagnostic indicators in the scrollbar. pub diagnostics: bool, + /// Whether to show cursor positions in the scrollbar.
pub cursors: bool, } +impl Default for Scrollbar { + fn default() -> Self { + Self { + show: ShowScrollbar::Auto, + git_diff: true, + selected_symbol: true, + search_results: true, + diagnostics: true, + cursors: true, + } + } +} + +/// Gutter-related settings. #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(default)] pub struct Gutter { + /// Whether to show line numbers in the gutter. pub line_numbers: bool, + /// Whether to show code action buttons in the gutter. pub code_actions: bool, + /// Whether to show runnable buttons in the gutter. pub runnables: bool, + /// Whether to show fold buttons in the gutter. pub folds: bool, } +impl Default for Gutter { + fn default() -> Self { + Self { + line_numbers: true, + code_actions: true, + runnables: true, + folds: true, + } + } +} + /// When to show the scrollbar in the editor. /// /// Default: auto @@ -171,188 +283,6 @@ pub struct SearchSettings { pub regex: bool, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct EditorSettingsContent { - /// Whether the cursor blinks in the editor. - /// - /// Default: true - pub cursor_blink: Option, - /// How to highlight the current line in the editor. - /// - /// Default: all - pub current_line_highlight: Option, - /// Whether to show the informational hover box when moving the mouse - /// over symbols in the editor. - /// - /// Default: true - pub hover_popover_enabled: Option, - - /// Whether to pop the completions menu while typing in an editor without - /// explicitly requesting it. - /// - /// Default: true - pub show_completions_on_input: Option, - /// Whether to display inline and alongside documentation for items in the - /// completions menu. - /// - /// Default: true - pub show_completion_documentation: Option, - /// The debounce delay before re-querying the language server for completion - /// documentation when not included in original completion list. 
- /// - /// Default: 300 ms - pub completion_documentation_secondary_query_debounce: Option, - /// Whether to use additional LSP queries to format (and amend) the code after - /// every "trigger" symbol input, defined by LSP server capabilities. - /// - /// Default: true - pub use_on_type_format: Option, - /// Toolbar related settings - pub toolbar: Option, - /// Scrollbar related settings - pub scrollbar: Option, - /// Gutter related settings - pub gutter: Option, - /// Whether the editor will scroll beyond the last line. - /// - /// Default: one_page - pub scroll_beyond_last_line: Option, - /// The number of lines to keep above/below the cursor when auto-scrolling. - /// - /// Default: 3. - pub vertical_scroll_margin: Option, - /// Scroll sensitivity multiplier. This multiplier is applied - /// to both the horizontal and vertical delta values while scrolling. - /// - /// Default: 1.0 - pub scroll_sensitivity: Option, - /// Whether the line numbers on editors gutter are relative or not. - /// - /// Default: false - pub relative_line_numbers: Option, - /// When to populate a new search's query based on the text under the cursor. - /// - /// Default: always - pub seed_search_query_from_cursor: Option, - pub use_smartcase_search: Option, - /// The key to use for adding multiple cursors - /// - /// Default: alt - pub multi_cursor_modifier: Option, - /// Hide the values of variables in `private` files, as defined by the - /// private_files setting. 
This only changes the visual representation, - /// the values are still present in the file and can be selected / copied / pasted - /// - /// Default: false - pub redact_private_values: Option, - - /// How many lines to expand the multibuffer excerpts by default - /// - /// Default: 3 - pub expand_excerpt_lines: Option, - - /// Whether to enable middle-click paste on Linux - /// - /// Default: true - pub middle_click_paste: Option, - - /// What to do when multibuffer is double clicked in some of its excerpts - /// (parts of singleton buffers). - /// - /// Default: select - pub double_click_in_multibuffer: Option, - /// Whether the editor search results will loop - /// - /// Default: true - pub search_wrap: Option, - - /// Defaults to use when opening a new buffer and project search items. - /// - /// Default: nothing is enabled - pub search: Option, - - /// Whether to automatically show a signature help pop-up or not. - /// - /// Default: false - pub auto_signature_help: Option, - - /// Whether to show the signature help pop-up after completions or bracket pairs inserted. - /// - /// Default: true - pub show_signature_help_after_edits: Option, - - /// Jupyter REPL settings. - pub jupyter: Option, -} - -// Toolbar related settings -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -pub struct ToolbarContent { - /// Whether to display breadcrumbs in the editor toolbar. - /// - /// Default: true - pub breadcrumbs: Option, - /// Whether to display quick action buttons in the editor toolbar. - /// - /// Default: true - pub quick_actions: Option, - - /// Whether to show the selections menu in the editor toolbar - /// - /// Default: true - pub selections_menu: Option, -} - -/// Scrollbar related settings -#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] -pub struct ScrollbarContent { - /// When to show the scrollbar in the editor. 
- /// - /// Default: auto - pub show: Option, - /// Whether to show git diff indicators in the scrollbar. - /// - /// Default: true - pub git_diff: Option, - /// Whether to show buffer search result indicators in the scrollbar. - /// - /// Default: true - pub search_results: Option, - /// Whether to show selected symbol occurrences in the scrollbar. - /// - /// Default: true - pub selected_symbol: Option, - /// Whether to show diagnostic indicators in the scrollbar. - /// - /// Default: true - pub diagnostics: Option, - /// Whether to show cursor positions in the scrollbar. - /// - /// Default: true - pub cursors: Option, -} - -/// Gutter related settings -#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -pub struct GutterContent { - /// Whether to show line numbers in the gutter. - /// - /// Default: true - pub line_numbers: Option, - /// Whether to show code action buttons in the gutter. - /// - /// Default: true - pub code_actions: Option, - /// Whether to show runnable buttons in the gutter. - /// - /// Default: true - pub runnables: Option, - /// Whether to show fold buttons in the gutter. 
- /// - /// Default: true - pub folds: Option, -} - impl EditorSettings { pub fn jupyter_enabled(cx: &AppContext) -> bool { EditorSettings::get_global(cx).jupyter.enabled @@ -362,7 +292,7 @@ impl EditorSettings { impl Settings for EditorSettings { const KEY: Option<&'static str> = None; - type FileContent = EditorSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/editor/src/editor_settings_controls.rs b/crates/editor/src/editor_settings_controls.rs index bbe1b00324a787..36d471dfa28f4b 100644 --- a/crates/editor/src/editor_settings_controls.rs +++ b/crates/editor/src/editor_settings_controls.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use gpui::{AppContext, FontFeatures, FontWeight}; -use project::project_settings::{InlineBlameSettings, ProjectSettings}; +use project::project_settings::ProjectSettings; use settings::{EditableSettingControl, Settings}; use theme::{FontFamilyCache, ThemeSettings}; use ui::{ @@ -296,14 +296,7 @@ impl EditableSettingControl for InlineGitBlameControl { value: Self::Value, _cx: &AppContext, ) { - if let Some(inline_blame) = settings.git.inline_blame.as_mut() { - inline_blame.enabled = value; - } else { - settings.git.inline_blame = Some(InlineBlameSettings { - enabled: false, - ..Default::default() - }); - } + settings.git.inline_blame.enabled = value; } } @@ -349,14 +342,7 @@ impl EditableSettingControl for LineNumbersControl { value: Self::Value, _cx: &AppContext, ) { - if let Some(gutter) = settings.gutter.as_mut() { - gutter.line_numbers = Some(value); - } else { - settings.gutter = Some(crate::editor_settings::GutterContent { - line_numbers: Some(value), - ..Default::default() - }); - } + settings.gutter.line_numbers = value; } } @@ -402,7 +388,7 @@ impl EditableSettingControl for RelativeLineNumbersControl { value: Self::Value, _cx: &AppContext, ) { - settings.relative_line_numbers = Some(value); + settings.relative_line_numbers = value; } } diff --git a/crates/editor/src/editor_tests.rs 
b/crates/editor/src/editor_tests.rs index 0b1e0385ded4ad..7d42dc7a85d06e 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6964,7 +6964,7 @@ async fn test_handle_input_for_show_signature_help_auto_signature_help_true( cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(true); + settings.auto_signature_help = true; }); }); }); @@ -7105,8 +7105,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(false); - settings.show_signature_help_after_edits = Some(false); + settings.auto_signature_help = false; + settings.show_signature_help_after_edits = false; }); }); }); @@ -7232,8 +7232,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(false); - settings.show_signature_help_after_edits = Some(true); + settings.auto_signature_help = false; + settings.show_signature_help_after_edits = true; }); }); }); @@ -7274,8 +7274,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(true); - settings.show_signature_help_after_edits = Some(false); + settings.auto_signature_help = true; + settings.show_signature_help_after_edits = false; }); }); }); @@ -7318,7 +7318,7 @@ async fn test_signature_help(cx: &mut gpui::TestAppContext) { cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = Some(true); + settings.auto_signature_help = true; }); }); }); @@ -7759,7 +7759,7 @@ 
async fn test_completion(cx: &mut gpui::TestAppContext) { cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.show_completions_on_input = Some(false); + settings.show_completions_on_input = false; }); }) }); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index d4f5c565c27ef0..1c0a325b7620e5 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1283,10 +1283,7 @@ impl EditorElement { .row, ); - let git_gutter_setting = ProjectSettings::get_global(cx) - .git - .git_gutter - .unwrap_or_default(); + let git_gutter_setting = ProjectSettings::get_global(cx).git.git_gutter; let display_hunks = buffer_snapshot .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) .map(|hunk| diff_hunk_to_display(&hunk, snapshot)) @@ -1366,12 +1363,10 @@ impl EditorElement { }; let padded_line_end = line_end + em_width * INLINE_BLAME_PADDING_EM_WIDTHS; - let min_column_in_pixels = ProjectSettings::get_global(cx) - .git - .inline_blame - .and_then(|settings| settings.min_column) - .map(|col| self.column_pixels(col as usize, cx)) - .unwrap_or(px(0.)); + let min_column_in_pixels = self.column_pixels( + ProjectSettings::get_global(cx).git.inline_blame.min_column as usize, + cx, + ); let min_start = content_origin.x - scroll_pixel_position.x + min_column_in_pixels; cmp::max(padded_line_end, min_start) @@ -3331,7 +3326,7 @@ impl EditorElement { .unwrap_or_else(|| { matches!( ProjectSettings::get_global(cx).git.git_gutter, - Some(GitGutterSetting::TrackedFiles) + GitGutterSetting::TrackedFiles ) }); if show_git_gutter { diff --git a/crates/extension/src/extension_settings.rs b/crates/extension/src/extension_settings.rs index a2ab7ac9cca73b..715dc3ca827f51 100644 --- a/crates/extension/src/extension_settings.rs +++ b/crates/extension/src/extension_settings.rs @@ -6,18 +6,25 @@ use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; use std::sync::Arc; 
-#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] +#[derive(Deserialize, Serialize, Debug, Clone, JsonSchema)] +#[serde(default)] pub struct ExtensionSettings { /// The extensions that should be automatically installed by Zed. /// /// This is used to make functionality provided by extensions (e.g., language support) /// available out-of-the-box. - #[serde(default)] pub auto_install_extensions: HashMap, bool>, - #[serde(default)] pub auto_update_extensions: HashMap, bool>, } +impl Default for ExtensionSettings { + fn default() -> Self { + Self { + auto_install_extensions: HashMap::from_iter([("html".into(), true)]), + auto_update_extensions: Default::default(), + } + } +} impl ExtensionSettings { /// Returns whether the given extension should be auto-installed. pub fn should_auto_install(&self, extension_id: &str) -> bool { diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index f246e3cf4fb830..b2d6d7f2831720 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -1000,7 +1000,7 @@ impl ExtensionsPage { this.update_settings::( selection, cx, - |setting, value| *setting = Some(value), + |setting, value| *setting = VimModeSetting(value), ); }), )), diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 63e0f2b07915eb..de3d1dc74d2774 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -180,18 +180,10 @@ pub(crate) enum LineIndicatorFormat { Long, } -/// Whether or not to automatically check for updates. 
-/// -/// Values: short, long -/// Default: short -#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)] -#[serde(transparent)] -pub(crate) struct LineIndicatorFormatContent(LineIndicatorFormat); - impl Settings for LineIndicatorFormat { const KEY: Option<&'static str> = Some("line_indicator_format"); - type FileContent = Option; + type FileContent = Self; fn load( sources: SettingsSources, @@ -199,9 +191,9 @@ impl Settings for LineIndicatorFormat { ) -> anyhow::Result { let format = [sources.release_channel, sources.user] .into_iter() - .find_map(|value| value.copied().flatten()) - .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); + .find_map(|value| value.copied()) + .unwrap_or(*sources.default); - Ok(format.0) + Ok(format) } } diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 8de9e6f009d733..b2035923605e0c 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -5,6 +5,7 @@ use core::fmt::Debug; use derive_more::{Add, AddAssign, Div, DivAssign, Mul, Neg, Sub, SubAssign}; use refineable::Refineable; +use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use std::{ cmp::{self, PartialOrd}, @@ -2201,6 +2202,7 @@ impl From for Radians { PartialEq, Serialize, Deserialize, + JsonSchema, )] #[repr(transparent)] pub struct Pixels(pub f32); diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index e1fcaaba28b4f1..7a6b758a2554ec 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -70,10 +70,10 @@ pub struct LanguageSettings { /// The column at which to soft-wrap lines, for buffers where soft-wrap /// is enabled. pub preferred_line_length: u32, - // Whether to show wrap guides (vertical rulers) in the editor. 
- // Setting this to true will show a guide at the 'preferred_line_length' value - // if softwrap is set to 'preferred_line_length', and will show any - // additional guides as specified by the 'wrap_guides' setting. + /// Whether to show wrap guides (vertical rulers) in the editor. + /// Setting this to true will show a guide at the 'preferred_line_length' value + /// if softwrap is set to 'preferred_line_length', and will show any + /// additional guides as specified by the 'wrap_guides' setting. pub show_wrap_guides: bool, /// Character counts at which to show wrap guides (vertical rulers) in the editor. pub wrap_guides: Vec, diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 6b5f74c2634b45..102eb1ef2f8563 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -7,10 +7,13 @@ use feature_flags::FeatureFlagAppExt; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext}; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; -use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{ + CodeLabel, Language, LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate, +}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; +use rope::Rope; use serde_json::{json, Value}; use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore}; use smol::{ @@ -202,6 +205,30 @@ impl LspAdapter for JsonLspAdapter { }))) } + async fn label_for_completion( + &self, + item: &lsp::CompletionItem, + language: &Arc, + ) -> Option { + let text = if let Some(description) = item + .label_details + .as_ref() + .and_then(|label_details| label_details.description.as_ref()) + { + format!("{} {}", item.label, description) + } else if let Some(detail) = &item.detail { + format!("{} {}", item.label, detail) + } else { + item.label.clone() + }; + let rope = Rope::from(item.label.as_str()); + let runs = 
language.highlight_text(&rope, 0..item.label.len()); + Some(language::CodeLabel { + text, + runs, + filter_range: 0..item.label.len(), + }) + } async fn workspace_configuration( self: Arc, _: &Arc, diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index c5f0187c229fa8..361607533b84b0 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -24,12 +24,12 @@ use editor::{ use file_icons::FileIcons; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use gpui::{ - actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, - AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, - EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, - KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, - SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, - VisualContext, WeakView, WindowContext, + actions, anchored, deferred, div, impl_actions, uniform_list, Action, AnyElement, AppContext, + AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, EventEmitter, + FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, KeyContext, Model, + MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, SharedString, Stateful, + Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, VisualContext, + WeakView, WindowContext, }; use itertools::Itertools; use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; @@ -1938,7 +1938,7 @@ impl OutlinePanel { .child( ListItem::new(item_id) .indent_level(depth) - .indent_step_size(px(settings.indent_size)) + .indent_step_size(settings.indent_size) .selected(is_active) .when_some(icon_element, |list_item, icon_element| { list_item.child(h_flex().child(icon_element)) @@ -3801,7 +3801,7 @@ impl 
Panel for OutlinePanel { DockPosition::Left | DockPosition::Bottom => OutlinePanelDockPosition::Left, DockPosition::Right => OutlinePanelDockPosition::Right, }; - settings.dock = Some(dock); + settings.dock = dock; }, ); } diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index e19fc3c0084947..a8e51b96c5c3aa 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -1,4 +1,5 @@ -use gpui::Pixels; +use anyhow; +use gpui::{px, Pixels}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; @@ -10,66 +11,51 @@ pub enum OutlinePanelDockPosition { Right, } -#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, JsonSchema)] pub struct OutlinePanelSettings { - pub button: bool, - pub default_width: Pixels, - pub dock: OutlinePanelDockPosition, - pub file_icons: bool, - pub folder_icons: bool, - pub git_status: bool, - pub indent_size: f32, - pub auto_reveal_entries: bool, - pub auto_fold_dirs: bool, -} - -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct OutlinePanelSettingsContent { /// Whether to show the outline panel button in the status bar. - /// - /// Default: true - pub button: Option, + pub button: bool, /// Customize default width (in pixels) taken by outline panel - /// - /// Default: 240 - pub default_width: Option, + pub default_width: Pixels, /// The position of outline panel - /// - /// Default: left - pub dock: Option, + pub dock: OutlinePanelDockPosition, /// Whether to show file icons in the outline panel. - /// - /// Default: true - pub file_icons: Option, + pub file_icons: bool, /// Whether to show folder icons or chevrons for directories in the outline panel. 
- /// - /// Default: true - pub folder_icons: Option, + pub folder_icons: bool, /// Whether to show the git status in the outline panel. - /// - /// Default: true - pub git_status: Option, + pub git_status: bool, /// Amount of indentation (in pixels) for nested items. - /// - /// Default: 20 - pub indent_size: Option, + pub indent_size: Pixels, /// Whether to reveal it in the outline panel automatically, /// when a corresponding project entry becomes active. /// Gitignored entries are never auto revealed. - /// - /// Default: true - pub auto_reveal_entries: Option, + pub auto_reveal_entries: bool, /// Whether to fold directories automatically /// when directory has only one directory inside. - /// - /// Default: true - pub auto_fold_dirs: Option, + pub auto_fold_dirs: bool, +} + +impl Default for OutlinePanelSettings { + fn default() -> Self { + Self { + button: true, + default_width: px(240.), + dock: OutlinePanelDockPosition::Left, + file_icons: true, + folder_icons: true, + auto_fold_dirs: true, + auto_reveal_entries: true, + indent_size: px(20.), + git_status: true, + } + } } impl Settings for OutlinePanelSettings { const KEY: Option<&'static str> = Some("outline_panel"); - type FileContent = OutlinePanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/performance/src/performance.rs b/crates/performance/src/performance.rs new file mode 100644 index 00000000000000..db2388c59ae1c0 --- /dev/null +++ b/crates/performance/src/performance.rs @@ -0,0 +1,184 @@ +use std::time::Instant; + +use anyhow::Result; +use gpui::{ + div, AppContext, InteractiveElement as _, Render, StatefulInteractiveElement as _, + Subscription, ViewContext, VisualContext, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources, SettingsStore}; +use workspace::{ + ui::{Label, LabelCommon, LabelSize, Tooltip}, + ItemHandle, StatusItemView, Workspace, +}; + +const SHOW_STARTUP_TIME_DURATION: 
std::time::Duration = std::time::Duration::from_secs(5); + +pub fn init(cx: &mut AppContext) { + PerformanceSettings::register(cx); + + let mut enabled = PerformanceSettings::get_global(cx).show_in_status_bar; + let start_time = Instant::now(); + let mut _observe_workspaces = toggle_status_bar_items(enabled, start_time, cx); + + cx.observe_global::(move |cx| { + let new_value = PerformanceSettings::get_global(cx).show_in_status_bar; + if new_value != enabled { + enabled = new_value; + _observe_workspaces = toggle_status_bar_items(enabled, start_time, cx); + } + }) + .detach(); +} + +fn toggle_status_bar_items( + enabled: bool, + start_time: Instant, + cx: &mut AppContext, +) -> Option { + for window in cx.windows() { + if let Some(workspace) = window.downcast::() { + workspace + .update(cx, |workspace, cx| { + toggle_status_bar_item(workspace, enabled, start_time, cx); + }) + .ok(); + } + } + + if enabled { + log::info!("performance metrics display enabled"); + Some(cx.observe_new_views::(move |workspace, cx| { + toggle_status_bar_item(workspace, true, start_time, cx); + })) + } else { + log::info!("performance metrics display disabled"); + None + } +} + +struct PerformanceStatusBarItem { + display_mode: DisplayMode, +} + +#[derive(Copy, Clone, Debug)] +enum DisplayMode { + StartupTime, + Fps, +} + +impl PerformanceStatusBarItem { + fn new(start_time: Instant, cx: &mut ViewContext) -> Self { + let now = Instant::now(); + let display_mode = if now < start_time + SHOW_STARTUP_TIME_DURATION { + DisplayMode::StartupTime + } else { + DisplayMode::Fps + }; + + let this = Self { display_mode }; + + if let DisplayMode::StartupTime = display_mode { + cx.spawn(|this, mut cx| async move { + let now = Instant::now(); + let remaining_duration = + (start_time + SHOW_STARTUP_TIME_DURATION).saturating_duration_since(now); + cx.background_executor().timer(remaining_duration).await; + this.update(&mut cx, |this, cx| { + this.display_mode = DisplayMode::Fps; + cx.notify(); + }) + 
.ok(); + }) + .detach(); + } + + this + } +} + +impl Render for PerformanceStatusBarItem { + fn render(&mut self, cx: &mut gpui::ViewContext) -> impl gpui::IntoElement { + let text = match self.display_mode { + DisplayMode::StartupTime => cx + .time_to_first_window_draw() + .map_or("Pending".to_string(), |duration| { + format!("{}ms", duration.as_millis()) + }), + DisplayMode::Fps => cx.fps().map_or("".to_string(), |fps| { + format!("{:3} FPS", fps.round() as u32) + }), + }; + + use gpui::ParentElement; + let display_mode = self.display_mode; + div() + .id("performance status") + .child(Label::new(text).size(LabelSize::Small)) + .tooltip(move |cx| match display_mode { + DisplayMode::StartupTime => Tooltip::text("Time to first window draw", cx), + DisplayMode::Fps => cx + .new_view(|cx| { + let tooltip = Tooltip::new("Current FPS"); + if let Some(time_to_first) = cx.time_to_first_window_draw() { + tooltip.meta(format!( + "Time to first window draw: {}ms", + time_to_first.as_millis() + )) + } else { + tooltip + } + }) + .into(), + }) + } +} + +impl StatusItemView for PerformanceStatusBarItem { + fn set_active_pane_item( + &mut self, + _active_pane_item: Option<&dyn ItemHandle>, + _cx: &mut gpui::ViewContext, + ) { + // This is not currently used. + } +} + +fn toggle_status_bar_item( + workspace: &mut Workspace, + enabled: bool, + start_time: Instant, + cx: &mut ViewContext, +) { + if enabled { + workspace.status_bar().update(cx, |bar, cx| { + bar.add_right_item( + cx.new_view(|cx| PerformanceStatusBarItem::new(start_time, cx)), + cx, + ) + }); + } else { + workspace.status_bar().update(cx, |bar, cx| { + bar.remove_items_of_type::(cx); + }); + } +} + +/// Configuration of the display of performance details. +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +#[serde(default)] +pub struct PerformanceSettings { + /// Display the time to first window draw and frame rate in the status bar. 
+ pub show_in_status_bar: bool, +} + +impl Settings for PerformanceSettings { + const KEY: Option<&'static str> = Some("performance"); + + type FileContent = Self; + + fn load(sources: SettingsSources, _: &mut AppContext) -> Result { + sources.json_merge() + } +} diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 70b2eccf237c62..3c21b1c5e8caf1 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -20,6 +20,7 @@ use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId}; use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent}; #[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct ProjectSettings { /// Configuration for language servers. /// @@ -41,7 +42,6 @@ pub struct ProjectSettings { pub load_direnv: DirenvSettings, /// Configuration for session-related features - #[serde(default)] pub session: SessionSettings, } @@ -59,36 +59,31 @@ pub enum DirenvSettings { } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct GitSettings { /// Whether or not to show the git gutter. /// /// Default: tracked_files - pub git_gutter: Option, + pub git_gutter: GitGutterSetting, pub gutter_debounce: Option, /// Whether or not to show git blame data inline in /// the currently focused line. /// /// Default: on - pub inline_blame: Option, + pub inline_blame: InlineBlameSettings, } impl GitSettings { pub fn inline_blame_enabled(&self) -> bool { #[allow(unknown_lints, clippy::manual_unwrap_or_default)] - match self.inline_blame { - Some(InlineBlameSettings { enabled, .. }) => enabled, - _ => false, - } + self.inline_blame.enabled } pub fn inline_blame_delay(&self) -> Option { - match self.inline_blame { - Some(InlineBlameSettings { - delay_ms: Some(delay_ms), - .. 
- }) if delay_ms > 0 => Some(Duration::from_millis(delay_ms)), - _ => None, - } + self.inline_blame + .delay_ms + .gt(&0) + .then(|| Duration::from_millis(self.inline_blame.delay_ms)) } } @@ -102,28 +97,34 @@ pub enum GitGutterSetting { Hide, } -#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] +#[serde(default)] pub struct InlineBlameSettings { /// Whether or not to show git blame data inline in /// the currently focused line. /// /// Default: true - #[serde(default = "true_value")] pub enabled: bool, /// Whether to only show the inline blame information /// after a delay once the cursor stops moving. /// /// Default: 0 - pub delay_ms: Option, + pub delay_ms: u64, /// The minimum column number to show the inline blame information at /// /// Default: 0 - pub min_column: Option, + pub min_column: u32, } -const fn true_value() -> bool { - true +impl Default for InlineBlameSettings { + fn default() -> Self { + Self { + enabled: true, + delay_ms: 0, + min_column: 0, + } + } } #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c77a2170dd01cd..6ca843875b48a5 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2289,7 +2289,7 @@ impl ProjectPanel { .child( ListItem::new(entry_id.to_proto() as usize) .indent_level(depth) - .indent_step_size(px(settings.indent_size)) + .indent_step_size(settings.indent_size) .selected(is_marked || is_active) .when_some(canonical_path, |this, path| { this.end_slot::( @@ -2817,7 +2817,7 @@ impl Render for DraggedProjectEntryView { this.bg(cx.theme().colors().background).w(self.width).child( ListItem::new(self.selection.entry_id.to_proto() as usize) .indent_level(self.details.depth) - .indent_step_size(px(settings.indent_size)) + 
.indent_step_size(settings.indent_size) .child(if let Some(icon) = &self.details.icon { div().child(Icon::from_path(icon.clone())) } else { @@ -2855,7 +2855,7 @@ impl Panel for ProjectPanel { DockPosition::Left | DockPosition::Bottom => ProjectPanelDockPosition::Left, DockPosition::Right => ProjectPanelDockPosition::Right, }; - settings.dock = Some(dock); + settings.dock = dock; }, ); } @@ -3029,7 +3029,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { worktree_settings.file_scan_exclusions = - Some(vec!["**/.git".to_string(), "**/4/**".to_string()]); + vec!["**/.git".to_string(), "**/4/**".to_string()]; }); }); }); @@ -4818,10 +4818,10 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Some(Vec::new()); + worktree_settings.file_scan_exclusions = Vec::new(); }); store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = Some(false) + project_panel_settings.auto_reveal_entries = false }); }) }); @@ -4940,7 +4940,7 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = Some(true) + project_panel_settings.auto_reveal_entries = true }); }) }); @@ -5054,10 +5054,10 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Some(Vec::new()); + worktree_settings.file_scan_exclusions = Vec::new(); }); store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = Some(false) + project_panel_settings.auto_reveal_entries = false }); }) }); @@ -5256,7 +5256,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - 
Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]); + vec!["excluded_dir".to_string(), "**/.git".to_string()]; }); }); }); @@ -5569,10 +5569,10 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_fold_dirs = Some(false); + project_panel_settings.auto_fold_dirs = false; }); store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Some(Vec::new()); + worktree_settings.file_scan_exclusions = Vec::new(); }); }); }); @@ -5591,10 +5591,10 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_fold_dirs = Some(false); + project_panel_settings.auto_fold_dirs = false; }); store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Some(Vec::new()); + worktree_settings.file_scan_exclusions = Vec::new(); }); }); }); diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 4d73ae92456da9..6910b4627a00a0 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -2,6 +2,7 @@ use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; +use ui::px; #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)] #[serde(rename_all = "snake_case")] @@ -10,20 +11,50 @@ pub enum ProjectPanelDockPosition { Right, } -#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, JsonSchema)] +#[serde(default)] pub struct ProjectPanelSettings { + /// Whether to show the project panel button in the status bar. 
pub button: bool, + /// Customize default width (in pixels) taken by project panel pub default_width: Pixels, + /// The position of project panel pub dock: ProjectPanelDockPosition, + /// Whether to show file icons in the project panel. pub file_icons: bool, + /// Whether to show folder icons or chevrons for directories in the project panel. pub folder_icons: bool, + /// Whether to show the git status in the project panel. pub git_status: bool, - pub indent_size: f32, + /// Amount of indentation (in pixels) for nested items. + pub indent_size: Pixels, + /// Whether to reveal it in the project panel automatically, + /// when a corresponding project entry becomes active. + /// Gitignored entries are never auto revealed. pub auto_reveal_entries: bool, + /// Whether to fold directories automatically + /// when directory has only one directory inside. pub auto_fold_dirs: bool, + /// Scrollbar-related settings pub scrollbar: ScrollbarSettings, } +impl Default for ProjectPanelSettings { + fn default() -> Self { + Self { + button: true, + default_width: px(240.), + dock: ProjectPanelDockPosition::Left, + file_icons: true, + folder_icons: true, + git_status: true, + indent_size: px(20.), + auto_reveal_entries: true, + auto_fold_dirs: true, + scrollbar: Default::default(), + } + } +} /// When to show the scrollbar in the project panel. /// /// Default: always @@ -37,7 +68,7 @@ pub enum ShowScrollbar { Never, } -#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettings { /// When to show the scrollbar in the project panel. /// @@ -45,63 +76,10 @@ pub struct ScrollbarSettings { pub show: ShowScrollbar, } -#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -pub struct ScrollbarSettingsContent { - /// When to show the scrollbar in the project panel. 
- /// - /// Default: always - pub show: Option, -} - -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -pub struct ProjectPanelSettingsContent { - /// Whether to show the project panel button in the status bar. - /// - /// Default: true - pub button: Option, - /// Customize default width (in pixels) taken by project panel - /// - /// Default: 240 - pub default_width: Option, - /// The position of project panel - /// - /// Default: left - pub dock: Option, - /// Whether to show file icons in the project panel. - /// - /// Default: true - pub file_icons: Option, - /// Whether to show folder icons or chevrons for directories in the project panel. - /// - /// Default: true - pub folder_icons: Option, - /// Whether to show the git status in the project panel. - /// - /// Default: true - pub git_status: Option, - /// Amount of indentation (in pixels) for nested items. - /// - /// Default: 20 - pub indent_size: Option, - /// Whether to reveal it in the project panel automatically, - /// when a corresponding project entry becomes active. - /// Gitignored entries are never auto revealed. - /// - /// Default: true - pub auto_reveal_entries: Option, - /// Whether to fold directories automatically - /// when directory has only one directory inside. 
- /// - /// Default: false - pub auto_fold_dirs: Option, - /// Scrollbar-related settings - pub scrollbar: Option, -} - impl Settings for ProjectPanelSettings { const KEY: Option<&'static str> = Some("project_panel"); - type FileContent = ProjectPanelSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index d8b10f31f9f55e..b7fa6359459290 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -48,7 +48,6 @@ use workspace::{notifications::DetachAndPromptErr, AppState, ModalView, Workspac use crate::open_dev_server_project; use crate::ssh_connections::connect_over_ssh; use crate::ssh_connections::open_ssh_project; -use crate::ssh_connections::RemoteSettingsContent; use crate::ssh_connections::SshConnection; use crate::ssh_connections::SshConnectionModal; use crate::ssh_connections::SshProject; @@ -1024,7 +1023,7 @@ impl DevServerProjects { fn update_settings_file( &mut self, cx: &mut ViewContext, - f: impl FnOnce(&mut RemoteSettingsContent) + Send + Sync + 'static, + f: impl FnOnce(&mut SshSettings) + Send + Sync + 'static, ) { let Some(fs) = self .workspace diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 8da4284b7f56a7..b54196022dbcde 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -22,8 +22,24 @@ use ui::{ use util::paths::PathWithPosition; use workspace::{AppState, ModalView, Workspace}; -#[derive(Deserialize)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct SshSettings { + /// ssh_connections is an array of ssh connections. + /// By default this setting is null, which disables the direct ssh connection support. + /// You can configure these from `project: Open Remote` in the command palette. 
+ /// Zed's ssh support will pull configuration from your ~/.ssh too. + /// Examples: + /// [ + /// { + /// "host": "example-box", + /// "projects": [ + /// { + /// "paths": ["/home/user/code/zed"] + /// } + /// ] + /// } + /// ] pub ssh_connections: Option>, } @@ -62,15 +78,10 @@ pub struct SshProject { pub paths: Vec, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct RemoteSettingsContent { - pub ssh_connections: Option>, -} - impl Settings for SshSettings { const KEY: Option<&'static str> = None; - type FileContent = RemoteSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/repl/src/jupyter_settings.rs b/crates/repl/src/jupyter_settings.rs index aefef6cec5b44d..f441da4790b804 100644 --- a/crates/repl/src/jupyter_settings.rs +++ b/crates/repl/src/jupyter_settings.rs @@ -6,8 +6,10 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Debug, Default)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct JupyterSettings { + /// Default kernels to select for each language. pub kernel_selections: HashMap, } @@ -20,26 +22,10 @@ impl JupyterSettings { } } -#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] -pub struct JupyterSettingsContent { - /// Default kernels to select for each language. 
- /// - /// Default: `{}` - pub kernel_selections: Option>, -} - -impl Default for JupyterSettingsContent { - fn default() -> Self { - JupyterSettingsContent { - kernel_selections: Some(HashMap::new()), - } - } -} - impl Settings for JupyterSettings { const KEY: Option<&'static str> = Some("jupyter"); - type FileContent = JupyterSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, @@ -51,10 +37,8 @@ impl Settings for JupyterSettings { let mut settings = JupyterSettings::default(); for value in sources.defaults_and_customizations() { - if let Some(source) = &value.kernel_selections { - for (k, v) in source { - settings.kernel_selections.insert(k.clone(), v.clone()); - } + for (k, v) in &value.kernel_selections { + settings.kernel_selections.insert(k.clone(), v.clone()); } } diff --git a/crates/tasks_ui/src/settings.rs b/crates/tasks_ui/src/settings.rs index 1bcd4962644232..4ad6f607b76c3a 100644 --- a/crates/tasks_ui/src/settings.rs +++ b/crates/tasks_ui/src/settings.rs @@ -2,22 +2,26 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Serialize, Deserialize, PartialEq, Default)] +#[derive(Clone, Serialize, Deserialize, PartialEq, JsonSchema)] +#[serde(default)] +/// Task-related settings. pub(crate) struct TaskSettings { + /// Whether to show task status indicator in the status bar. Default: true pub(crate) show_status_indicator: bool, } -/// Task-related settings. -#[derive(Serialize, Deserialize, PartialEq, Default, Clone, JsonSchema)] -pub(crate) struct TaskSettingsContent { - /// Whether to show task status indicator in the status bar. 
Default: true - show_status_indicator: Option, +impl Default for TaskSettings { + fn default() -> Self { + Self { + show_status_indicator: true, + } + } } impl Settings for TaskSettings { const KEY: Option<&'static str> = Some("task"); - type FileContent = TaskSettingsContent; + type FileContent = Self; fn load( sources: SettingsSources, diff --git a/crates/vim/src/digraph.rs b/crates/vim/src/digraph.rs index 443b7ff37801eb..282016cfdad277 100644 --- a/crates/vim/src/digraph.rs +++ b/crates/vim/src/digraph.rs @@ -132,7 +132,7 @@ mod test { let mut custom_digraphs = HashMap::default(); custom_digraphs.insert("|-".into(), "⊢".into()); custom_digraphs.insert(":)".into(), "👨‍💻".into()); - s.custom_digraphs = Some(custom_digraphs); + s.custom_digraphs = custom_digraphs; }); }); diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 8198c0da53b4f0..815086d0be0470 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -1184,7 +1184,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_multiline_find = Some(true); + s.use_multiline_find = true; }); }); @@ -1226,7 +1226,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_multiline_find = Some(true); + s.use_multiline_find = true; }); }); @@ -1268,7 +1268,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_smartcase_find = Some(true); + s.use_smartcase_find = true; }); }); diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index 05469dbf9f168f..6465e33e0f3b53 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -291,7 +291,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { 
store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); @@ -327,7 +327,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::OnYank) + s.use_system_clipboard = UseSystemClipboard::OnYank }); }); @@ -584,7 +584,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); @@ -630,7 +630,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); @@ -659,7 +659,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); @@ -707,7 +707,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = Some(UseSystemClipboard::Never) + s.use_system_clipboard = UseSystemClipboard::Never }); }); diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index f89faa3748372f..6a20ea4eb33bb1 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -294,7 +294,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.scroll_beyond_last_line = Some(ScrollBeyondLastLine::Off) + s.scroll_beyond_last_line = ScrollBeyondLastLine::Off }); }); diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 28f33d49d85f20..6418475ad2f576 100644 --- a/crates/vim/src/normal/search.rs +++ 
b/crates/vim/src/normal/search.rs @@ -542,7 +542,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { - store.update_user_settings::(cx, |s| s.search_wrap = Some(false)); + store.update_user_settings::(cx, |s| s.search_wrap = false); }); cx.set_state("ˇhi\nhigh\nhi\n", Mode::Normal); @@ -655,7 +655,7 @@ mod test { // check that searching with unable search wrap cx.update_global(|store: &mut SettingsStore, cx| { - store.update_user_settings::(cx, |s| s.search_wrap = Some(false)); + store.update_user_settings::(cx, |s| s.search_wrap = false); }); cx.set_state("aa\nbˇb\ncc\ncc\ncc\n", Mode::Normal); cx.simulate_keystrokes("/ c c enter"); diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 9c61e9cd938292..be7db47315f22d 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -1300,7 +1300,7 @@ async fn test_command_alias(cx: &mut gpui::TestAppContext) { store.update_user_settings::(cx, |s| { let mut aliases = HashMap::default(); aliases.insert("Q".to_string(), "upper".to_string()); - s.command_aliases = Some(aliases) + s.command_aliases = aliases }); }); diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index c985f68e701eb1..b68d2ede8b09b7 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -57,7 +57,7 @@ impl VimTestContext { pub fn new_with_lsp(mut cx: EditorLspTestContext, enabled: bool) -> VimTestContext { cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = Some(enabled)); + store.update_user_settings::(cx, |s| *s = VimModeSetting(enabled)); }); settings::KeymapFile::load_asset("keymaps/default-macos.json", cx).unwrap(); if enabled { @@ -105,7 +105,7 @@ impl VimTestContext { pub fn enable_vim(&mut self) { self.cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = 
Some(true)); + store.update_user_settings::(cx, |s| *s = VimModeSetting(true)); }); }) } @@ -113,7 +113,7 @@ impl VimTestContext { pub fn disable_vim(&mut self) { self.cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = Some(false)); + store.update_user_settings::(cx, |s| *s = VimModeSetting(false)); }); }) } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 6e03374c22595e..6baca17948e8ef 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -46,6 +46,8 @@ use crate::state::ReplayableAction; /// Whether or not to enable Vim mode. /// /// Default: false +#[derive(Copy, Clone, Default, Deserialize, Serialize, JsonSchema)] +#[serde(default, transparent)] pub struct VimModeSetting(pub bool); /// An Action to Switch between modes @@ -99,7 +101,7 @@ pub fn init(cx: &mut AppContext) { let fs = workspace.app_state().fs.clone(); let currently_enabled = Vim::enabled(cx); update_settings_file::(fs, cx, move |setting, _| { - *setting = Some(!currently_enabled) + *setting = VimModeSetting(!currently_enabled); }) }); @@ -1068,12 +1070,10 @@ impl Vim { impl Settings for VimModeSetting { const KEY: Option<&'static str> = Some("vim_mode"); - type FileContent = Option; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { - Ok(Self(sources.user.copied().flatten().unwrap_or( - sources.default.ok_or_else(Self::missing_default)?, - ))) + Ok(sources.user.copied().unwrap_or(*sources.default)) } } @@ -1089,7 +1089,8 @@ pub enum UseSystemClipboard { OnYank, } -#[derive(Deserialize)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] struct VimSettings { pub toggle_relative_line_numbers: bool, pub use_system_clipboard: UseSystemClipboard, @@ -1098,19 +1099,22 @@ struct VimSettings { pub custom_digraphs: HashMap>, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -struct VimSettingsContent { - pub toggle_relative_line_numbers: Option, 
- pub use_system_clipboard: Option, - pub use_multiline_find: Option, - pub use_smartcase_find: Option, - pub custom_digraphs: Option>>, +impl Default for VimSettings { + fn default() -> Self { + Self { + toggle_relative_line_numbers: false, + use_system_clipboard: UseSystemClipboard::Always, + use_multiline_find: false, + use_smartcase_find: false, + custom_digraphs: Default::default(), + } + } } impl Settings for VimSettings { const KEY: Option<&'static str> = Some("vim"); - type FileContent = VimSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/welcome/src/base_keymap_picker.rs b/crates/welcome/src/base_keymap_picker.rs index 96a9df9c3c8cca..fd7361f9b3d002 100644 --- a/crates/welcome/src/base_keymap_picker.rs +++ b/crates/welcome/src/base_keymap_picker.rs @@ -177,7 +177,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate { .report_setting_event("keymap", base_keymap.to_string()); update_settings_file::(self.fs.clone(), cx, move |setting, _| { - *setting = Some(base_keymap) + *setting = base_keymap; }); } diff --git a/crates/welcome/src/base_keymap_setting.rs b/crates/welcome/src/base_keymap_setting.rs index 1b52bbc9f94fbd..0c1724627cf6d9 100644 --- a/crates/welcome/src/base_keymap_setting.rs +++ b/crates/welcome/src/base_keymap_setting.rs @@ -87,15 +87,15 @@ impl BaseKeymap { impl Settings for BaseKeymap { const KEY: Option<&'static str> = Some("base_keymap"); - type FileContent = Option; + type FileContent = Self; fn load( sources: SettingsSources, _: &mut gpui::AppContext, ) -> anyhow::Result { - if let Some(Some(user_value)) = sources.user.copied() { + if let Some(user_value) = sources.user.copied() { return Ok(user_value); } - sources.default.ok_or_else(Self::missing_default) + Ok(*sources.default) } } diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index fc837c68671a31..787c2e589bb0fe 100644 --- a/crates/welcome/src/welcome.rs 
+++ b/crates/welcome/src/welcome.rs @@ -188,7 +188,7 @@ impl Render for WelcomePage { this.update_settings::( selection, cx, - |setting, value| *setting = Some(value), + |setting, value| *setting = VimModeSetting(value), ); }), )) diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 935f0268b62ffe..46b8f3bf7fa90b 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -36,20 +36,49 @@ use util::ResultExt; pub const LEADER_UPDATE_THROTTLE: Duration = Duration::from_millis(200); -#[derive(Deserialize)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct ItemSettings { + /// Whether to show the Git file status on a tab item. pub git_status: bool, + /// Position of the close button in a tab. pub close_position: ClosePosition, + /// Whether to show the file icon for a tab. pub file_icons: bool, } -#[derive(Deserialize)] +impl Default for ItemSettings { + fn default() -> Self { + Self { + git_status: false, + close_position: ClosePosition::Right, + file_icons: false, + } + } +} + +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct PreviewTabsSettings { + /// Whether to show opened editors as preview tabs. + /// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic. pub enabled: bool, + /// Whether to open tabs in preview mode when selected from the file finder. pub enable_preview_from_file_finder: bool, + /// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. 
pub enable_preview_from_code_navigation: bool, } +impl Default for PreviewTabsSettings { + fn default() -> Self { + Self { + enabled: true, + enable_preview_from_file_finder: false, + enable_preview_from_code_navigation: false, + } + } +} + #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "lowercase")] pub enum ClosePosition { @@ -67,43 +96,10 @@ impl ClosePosition { } } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct ItemSettingsContent { - /// Whether to show the Git file status on a tab item. - /// - /// Default: false - git_status: Option, - /// Position of the close button in a tab. - /// - /// Default: right - close_position: Option, - /// Whether to show the file icon for a tab. - /// - /// Default: false - file_icons: Option, -} - -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct PreviewTabsSettingsContent { - /// Whether to show opened editors as preview tabs. - /// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic. - /// - /// Default: true - enabled: Option, - /// Whether to open tabs in preview mode when selected from the file finder. - /// - /// Default: false - enable_preview_from_file_finder: Option, - /// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. 
- /// - /// Default: false - enable_preview_from_code_navigation: Option, -} - impl Settings for ItemSettings { const KEY: Option<&'static str> = Some("tabs"); - type FileContent = ItemSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() @@ -113,7 +109,7 @@ impl Settings for ItemSettings { impl Settings for PreviewTabsSettings { const KEY: Option<&'static str> = Some("preview_tabs"); - type FileContent = PreviewTabsSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 7371e56cee2376..50b92326b28b19 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -6418,7 +6418,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = Some(AutosaveSetting::OnWindowChange); + settings.autosave = AutosaveSetting::OnWindowChange; }) }); item.is_dirty = true; @@ -6438,7 +6438,7 @@ mod tests { cx.focus_self(); SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = Some(AutosaveSetting::OnFocusChange); + settings.autosave = AutosaveSetting::OnFocusChange; }) }); item.is_dirty = true; @@ -6461,7 +6461,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = Some(AutosaveSetting::AfterDelay { milliseconds: 500 }); + settings.autosave = AutosaveSetting::AfterDelay { milliseconds: 500 }; }) }); item.is_dirty = true; @@ -6480,7 +6480,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = Some(AutosaveSetting::OnFocusChange); 
+ settings.autosave = AutosaveSetting::OnFocusChange; }) }); item.is_dirty = true; diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index 52827c6941ae7d..f87840eb308388 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -5,22 +5,58 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Deserialize)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct WorkspaceSettings { + /// Scale by which to zoom the active pane. + /// When set to 1.0, the active pane has the same size as others, + /// but when set to a larger value, the active pane takes up more space. pub active_pane_magnification: f32, + /// Direction to split horizontally. pub pane_split_direction_horizontal: PaneSplitDirectionHorizontal, + /// Direction to split vertically. pub pane_split_direction_vertical: PaneSplitDirectionVertical, + /// Centered layout related settings. pub centered_layout: CenteredLayoutSettings, + /// Whether or not to prompt the user to confirm before closing the application. pub confirm_quit: bool, + /// Whether or not to show the call status icon in the status bar. pub show_call_status_icon: bool, + /// When to automatically save edited buffers. pub autosave: AutosaveSetting, + /// Controls previous session restoration in freshly launched Zed instance. pub restore_on_startup: RestoreOnStartupBehavior, + /// The size of the workspace split drop targets on the outer edges. + /// Given as a fraction that will be multiplied by the smaller dimension of the workspace. pub drop_target_size: f32, + /// Whether to close the window when using 'close active item' on a workspace with no tabs pub when_closing_with_no_tabs: CloseWindowWhenNoItems, + /// Whether to use the system provided dialogs for Open and Save As. + /// When set to false, Zed will use the built-in keyboard-first pickers. 
pub use_system_path_prompts: bool, + /// Aliases for the command palette. When you type a key in this map, + /// it will be assumed to equal the value. pub command_aliases: HashMap, } +impl Default for WorkspaceSettings { + fn default() -> Self { + Self { + active_pane_magnification: 1.0, + pane_split_direction_horizontal: PaneSplitDirectionHorizontal::Up, + pane_split_direction_vertical: PaneSplitDirectionVertical::Left, + centered_layout: CenteredLayoutSettings::default(), + confirm_quit: false, + show_call_status_icon: true, + autosave: AutosaveSetting::Off, + restore_on_startup: RestoreOnStartupBehavior::default(), + drop_target_size: 0.2, + when_closing_with_no_tabs: CloseWindowWhenNoItems::default(), + use_system_path_prompts: true, + command_aliases: HashMap::default(), + } + } +} #[derive(Copy, Clone, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CloseWindowWhenNoItems { @@ -55,77 +91,22 @@ pub enum RestoreOnStartupBehavior { LastSession, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct WorkspaceSettingsContent { - /// Scale by which to zoom the active pane. - /// When set to 1.0, the active pane has the same size as others, - /// but when set to a larger value, the active pane takes up more space. - /// - /// Default: `1.0` - pub active_pane_magnification: Option, - // Direction to split horizontally. - // - // Default: "up" - pub pane_split_direction_horizontal: Option, - // Direction to split vertically. - // - // Default: "left" - pub pane_split_direction_vertical: Option, - // Centered layout related settings. - pub centered_layout: Option, - /// Whether or not to prompt the user to confirm before closing the application. - /// - /// Default: false - pub confirm_quit: Option, - /// Whether or not to show the call status icon in the status bar. - /// - /// Default: true - pub show_call_status_icon: Option, - /// When to automatically save edited buffers. 
- /// - /// Default: off - pub autosave: Option, - /// Controls previous session restoration in freshly launched Zed instance. - /// Values: none, last_workspace, last_session - /// Default: last_session - pub restore_on_startup: Option, - /// The size of the workspace split drop targets on the outer edges. - /// Given as a fraction that will be multiplied by the smaller dimension of the workspace. - /// - /// Default: `0.2` (20% of the smaller dimension of the workspace) - pub drop_target_size: Option, - /// Whether to close the window when using 'close active item' on a workspace with no tabs - /// - /// Default: auto ("on" on macOS, "off" otherwise) - pub when_closing_with_no_tabs: Option, - /// Whether to use the system provided dialogs for Open and Save As. - /// When set to false, Zed will use the built-in keyboard-first pickers. - /// - /// Default: true - pub use_system_path_prompts: Option, - /// Aliases for the command palette. When you type a key in this map, - /// it will be assumed to equal the value. - /// - /// Default: true - pub command_aliases: Option>, -} - -#[derive(Deserialize)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct TabBarSettings { + /// Whether or not to show the tab bar in the editor. pub show: bool, + /// Whether or not to show the navigation history buttons in the tab bar. pub show_nav_history_buttons: bool, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct TabBarSettingsContent { - /// Whether or not to show the tab bar in the editor. - /// - /// Default: true - pub show: Option, - /// Whether or not to show the navigation history buttons in the tab bar. 
- /// - /// Default: true - pub show_nav_history_buttons: Option, +impl Default for TabBarSettings { + fn default() -> Self { + Self { + show_nav_history_buttons: true, + show: true, + } + } } #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] @@ -163,17 +144,26 @@ pub struct CenteredLayoutSettings { /// /// Default: 0.2 pub left_padding: Option, - // The relative width of the right padding of the central pane from the - // workspace when the centered layout is used. + /// The relative width of the right padding of the central pane from the + /// workspace when the centered layout is used. /// /// Default: 0.2 pub right_padding: Option, } +impl Default for CenteredLayoutSettings { + fn default() -> Self { + Self { + left_padding: Some(0.2), + right_padding: Some(0.2), + } + } +} + impl Settings for WorkspaceSettings { const KEY: Option<&'static str> = None; - type FileContent = WorkspaceSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() @@ -183,7 +173,7 @@ impl Settings for WorkspaceSettings { impl Settings for TabBarSettings { const KEY: Option<&'static str> = Some("tab_bar"); - type FileContent = TabBarSettingsContent; + type FileContent = Self; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index 32851d963a1321..82be3a80287928 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -25,7 +25,8 @@ impl WorktreeSettings { } } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Serialize, Deserialize, JsonSchema)] +#[serde(default)] pub struct WorktreeSettingsContent { /// Completely ignore files matching globs from `file_scan_exclusions` /// @@ -39,12 +40,42 @@ pub struct WorktreeSettingsContent { /// "**/.classpath", /// "**/.settings" /// ] - 
#[serde(default)] - pub file_scan_exclusions: Option>, + pub file_scan_exclusions: Vec, /// Treat the files matching these globs as `.env` files. /// Default: [ "**/.env*" ] - pub private_files: Option>, + pub private_files: Vec, +} + +impl Default for WorktreeSettingsContent { + fn default() -> Self { + Self { + private_files: [ + "**/.env*", + "**/*.pem", + "**/*.key", + "**/*.cert", + "**/*.crt", + "**/secrets.yml", + ] + .into_iter() + .map(str::to_owned) + .collect(), + file_scan_exclusions: [ + "**/.git", + "**/.svn", + "**/.hg", + "**/CVS", + "**/.DS_Store", + "**/Thumbs.db", + "**/.classpath", + "**/.settings", + ] + .into_iter() + .map(str::to_owned) + .collect(), + } + } } impl Settings for WorktreeSettings { @@ -57,8 +88,8 @@ impl Settings for WorktreeSettings { _: &mut AppContext, ) -> anyhow::Result { let result: WorktreeSettingsContent = sources.json_merge()?; - let mut file_scan_exclusions = result.file_scan_exclusions.unwrap_or_default(); - let mut private_files = result.private_files.unwrap_or_default(); + let mut file_scan_exclusions = result.file_scan_exclusions; + let mut private_files = result.private_files; file_scan_exclusions.sort(); private_files.sort(); Ok(Self { diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 929dc01c6d17e9..455bc62a79a874 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -673,7 +673,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = Some(Vec::new()); + project_settings.file_scan_exclusions = Vec::new(); }); }); }); @@ -910,7 +910,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - Some(vec!["**/foo/**".to_string(), 
"**/.DS_Store".to_string()]); + vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; }); }); }); @@ -945,8 +945,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = - Some(vec!["**/node_modules/**".to_string()]); + project_settings.file_scan_exclusions = vec!["**/node_modules/**".to_string()]; }); }); }); @@ -1009,11 +1008,11 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = Some(vec![ + project_settings.file_scan_exclusions = vec![ "**/.git".to_string(), "node_modules/".to_string(), "build_output".to_string(), - ]); + ]; }); }); }); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 93fee57ecdb654..9f670efcd7c5fe 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -1996,7 +1996,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]); + vec!["excluded_dir".to_string(), "**/.git".to_string()]; }); }); }); From ef5a7e1642ecf7c2c4bb123e66e1bb477c55b255 Mon Sep 17 00:00:00 2001 From: Amin Ahmed Khan Date: Wed, 11 Sep 2024 06:14:43 +0300 Subject: [PATCH 040/762] Fix OpenAI key URL (#17675) Update the create Open AI Key URL Release Notes: - Fixed a link in the Assistant panel to the OpenAI console. 
--- crates/language_model/src/provider/open_ai.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index a7a962e9251671..3a371499eb44db 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -491,7 +491,7 @@ impl ConfigurationView { impl Render for ConfigurationView { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - const OPENAI_CONSOLE_URL: &str = "https://console.anthropic.com/settings/keys"; + const OPENAI_CONSOLE_URL: &str = "https://platform.openai.com/api-keys"; const INSTRUCTIONS: [&str; 6] = [ "To use the assistant panel or inline assistant, you need to add your OpenAI API key.", " - You can create an API key at: ", From 8088d1a9b7377cc9ae2a8f94ea9a5ce868ffc1d6 Mon Sep 17 00:00:00 2001 From: "Dairon M." Date: Tue, 10 Sep 2024 23:21:24 -0400 Subject: [PATCH 041/762] erlang: Bump to v0.1.0 (#17679) Changes: - https://github.com/zed-industries/zed/pull/14914 - https://github.com/zed-industries/zed/pull/14879 - https://github.com/zed-industries/zed/pull/14923 - https://github.com/zed-industries/zed/pull/15973 - https://github.com/zed-industries/zed/pull/16955 Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- Cargo.lock | 2 +- extensions/erlang/Cargo.toml | 2 +- extensions/erlang/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 07b97e030df7bc..2027c5421f5dda 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14371,7 +14371,7 @@ dependencies = [ [[package]] name = "zed_erlang" -version = "0.0.1" +version = "0.1.0" dependencies = [ "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/extensions/erlang/Cargo.toml b/extensions/erlang/Cargo.toml index 1ac6b4b1b641fc..50673448963bf9 100644 --- a/extensions/erlang/Cargo.toml +++ b/extensions/erlang/Cargo.toml @@ 
-1,6 +1,6 @@ [package] name = "zed_erlang" -version = "0.0.1" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/erlang/extension.toml b/extensions/erlang/extension.toml index 863da32dfa71a3..23c7cf6d4a97d0 100644 --- a/extensions/erlang/extension.toml +++ b/extensions/erlang/extension.toml @@ -1,7 +1,7 @@ id = "erlang" name = "Erlang" description = "Erlang support." -version = "0.0.1" +version = "0.1.0" schema_version = 1 authors = ["Dairon M ", "Fabian Bergström "] repository = "https://github.com/zed-industries/zed" From 331d28d47982d36c317c80c12851a49422b6468e Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 10 Sep 2024 23:37:37 -0400 Subject: [PATCH 042/762] php: Remove invalid node types from highlights queries (#17680) This PR removes some invalid node types from the PHP highlights queries. Release Notes: - N/A --- extensions/php/languages/php/highlights.scm | 3 --- 1 file changed, 3 deletions(-) diff --git a/extensions/php/languages/php/highlights.scm b/extensions/php/languages/php/highlights.scm index ab1fdf662cf695..6afeb1090b7a0b 100644 --- a/extensions/php/languages/php/highlights.scm +++ b/extensions/php/languages/php/highlights.scm @@ -133,8 +133,5 @@ "trait" @keyword "try" @keyword "use" @keyword -"var" @keyword "while" @keyword "xor" @keyword -"yield" @keyword -"yield from" @keyword From ec189fe884f48109d17ca3f2984c49c933b46253 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 10 Sep 2024 23:58:57 -0400 Subject: [PATCH 043/762] Add a way to join all panes into one (#17673) Closes https://github.com/zed-industries/zed/issues/17536 Closes https://github.com/zed-industries/zed/pull/17548 Release Notes: - Added a way to join all panes into one with `pane::JoinAll` action ([#17536](https://github.com/zed-industries/zed/issues/17536)) --------- Co-authored-by: Yogesh Dhamija --- crates/workspace/src/pane.rs | 19 +-- crates/workspace/src/workspace.rs | 201 ++++++++++++++++++++++++------ 2 
files changed, 173 insertions(+), 47 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index cc752f7aec9ff6..d0fa411381a6d6 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -3,6 +3,7 @@ use crate::{ ClosePosition, Item, ItemHandle, ItemSettings, PreviewTabsSettings, TabContentParams, WeakItemHandle, }, + move_item, notifications::NotifyResultExt, toolbar::Toolbar, workspace_settings::{AutosaveSetting, TabBarSettings, WorkspaceSettings}, @@ -149,6 +150,7 @@ actions!( GoBack, GoForward, JoinIntoNext, + JoinAll, ReopenClosedItem, SplitLeft, SplitUp, @@ -188,6 +190,7 @@ pub enum Event { item_id: EntityId, }, Split(SplitDirection), + JoinAll, JoinIntoNext, ChangeItemTitle, Focus, @@ -220,6 +223,7 @@ impl fmt::Debug for Event { .debug_struct("Split") .field("direction", direction) .finish(), + Event::JoinAll => f.write_str("JoinAll"), Event::JoinIntoNext => f.write_str("JoinIntoNext"), Event::ChangeItemTitle => f.write_str("ChangeItemTitle"), Event::Focus => f.write_str("Focus"), @@ -679,6 +683,10 @@ impl Pane { cx.emit(Event::JoinIntoNext); } + fn join_all(&mut self, cx: &mut ViewContext) { + cx.emit(Event::JoinAll); + } + fn history_updated(&mut self, cx: &mut ViewContext) { self.toolbar.update(cx, |_, cx| cx.notify()); } @@ -1757,9 +1765,7 @@ impl Pane { self.workspace .update(cx, |_, cx| { - cx.defer(move |this, cx| { - this.move_item(pane.clone(), pane, id, destination_index, cx) - }); + cx.defer(move |_, cx| move_item(&pane, &pane, id, destination_index, cx)); }) .ok()?; @@ -1777,9 +1783,7 @@ impl Pane { self.workspace .update(cx, |_, cx| { - cx.defer(move |this, cx| { - this.move_item(pane.clone(), pane, id, destination_index, cx) - }); + cx.defer(move |_, cx| move_item(&pane, &pane, id, destination_index, cx)); }) .ok()?; @@ -2349,7 +2353,7 @@ impl Pane { } }) } - workspace.move_item(from_pane.clone(), to_pane.clone(), item_id, ix, cx); + move_item(&from_pane, &to_pane, item_id, ix, cx); }); }) 
.log_err(); @@ -2556,6 +2560,7 @@ impl Render for Pane { .on_action(cx.listener(|pane, _: &GoBack, cx| pane.navigate_backward(cx))) .on_action(cx.listener(|pane, _: &GoForward, cx| pane.navigate_forward(cx))) .on_action(cx.listener(|pane, _: &JoinIntoNext, cx| pane.join_into_next(cx))) + .on_action(cx.listener(|pane, _: &JoinAll, cx| pane.join_all(cx))) .on_action(cx.listener(Pane::toggle_zoom)) .on_action(cx.listener(|pane: &mut Pane, action: &ActivateItem, cx| { pane.activate_item(action.0, true, true, cx); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 50b92326b28b19..e6358cfdb95595 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -2965,6 +2965,7 @@ impl Workspace { self.split_and_clone(pane, *direction, cx); } pane::Event::JoinIntoNext => self.join_pane_into_next(pane, cx), + pane::Event::JoinAll => self.join_all_panes(cx), pane::Event::Remove { focus_on_pane } => { self.remove_pane(pane, focus_on_pane.clone(), cx) } @@ -3094,7 +3095,7 @@ impl Workspace { }; let new_pane = self.add_pane(cx); - self.move_item(from.clone(), new_pane.clone(), item_id_to_move, 0, cx); + move_item(&from, &new_pane, item_id_to_move, 0, cx); self.center .split(&pane_to_split, &new_pane, split_direction) .unwrap(); @@ -3122,6 +3123,17 @@ impl Workspace { })) } + pub fn join_all_panes(&mut self, cx: &mut ViewContext) { + let active_item = self.active_pane.read(cx).active_item(); + for pane in &self.panes { + join_pane_into_active(&self.active_pane, pane, cx); + } + if let Some(active_item) = active_item { + self.activate_item(active_item.as_ref(), true, true, cx); + } + cx.notify(); + } + pub fn join_pane_into_next(&mut self, pane: View, cx: &mut ViewContext) { let next_pane = self .find_pane_in_direction(SplitDirection::Right, cx) @@ -3131,48 +3143,10 @@ impl Workspace { let Some(next_pane) = next_pane else { return; }; - - let item_ids: Vec = pane.read(cx).items().map(|item| item.item_id()).collect(); - 
for item_id in item_ids { - self.move_item(pane.clone(), next_pane.clone(), item_id, 0, cx); - } + move_all_items(&pane, &next_pane, cx); cx.notify(); } - pub fn move_item( - &mut self, - source: View, - destination: View, - item_id_to_move: EntityId, - destination_index: usize, - cx: &mut ViewContext, - ) { - let Some((item_ix, item_handle)) = source - .read(cx) - .items() - .enumerate() - .find(|(_, item_handle)| item_handle.item_id() == item_id_to_move) - else { - // Tab was closed during drag - return; - }; - - let item_handle = item_handle.clone(); - - if source != destination { - // Close item from previous pane - source.update(cx, |source, cx| { - source.remove_item_and_focus_on_pane(item_ix, false, destination.clone(), cx); - }); - } - - // This automatically removes duplicate items in the pane - destination.update(cx, |destination, cx| { - destination.add_item(item_handle, true, true, Some(destination_index), cx); - destination.focus(cx) - }); - } - fn remove_pane( &mut self, pane: View, @@ -5944,6 +5918,79 @@ fn resize_edge( } } +fn join_pane_into_active(active_pane: &View, pane: &View, cx: &mut WindowContext<'_>) { + if pane == active_pane { + return; + } else if pane.read(cx).items_len() == 0 { + pane.update(cx, |_, cx| { + cx.emit(pane::Event::Remove { + focus_on_pane: None, + }); + }) + } else { + move_all_items(pane, active_pane, cx); + } +} + +fn move_all_items(from_pane: &View, to_pane: &View, cx: &mut WindowContext<'_>) { + let destination_is_different = from_pane != to_pane; + let mut moved_items = 0; + for (item_ix, item_handle) in from_pane + .read(cx) + .items() + .enumerate() + .map(|(ix, item)| (ix, item.clone())) + .collect::>() + { + let ix = item_ix - moved_items; + if destination_is_different { + // Close item from previous pane + from_pane.update(cx, |source, cx| { + source.remove_item_and_focus_on_pane(ix, false, to_pane.clone(), cx); + }); + moved_items += 1; + } + + // This automatically removes duplicate items in the pane + 
to_pane.update(cx, |destination, cx| { + destination.add_item(item_handle, true, true, None, cx); + destination.focus(cx) + }); + } +} + +pub fn move_item( + source: &View, + destination: &View, + item_id_to_move: EntityId, + destination_index: usize, + cx: &mut WindowContext<'_>, +) { + let Some((item_ix, item_handle)) = source + .read(cx) + .items() + .enumerate() + .find(|(_, item_handle)| item_handle.item_id() == item_id_to_move) + .map(|(ix, item)| (ix, item.clone())) + else { + // Tab was closed during drag + return; + }; + + if source != destination { + // Close item from previous pane + source.update(cx, |source, cx| { + source.remove_item_and_focus_on_pane(item_ix, false, destination.clone(), cx); + }); + } + + // This automatically removes duplicate items in the pane + destination.update(cx, |destination, cx| { + destination.add_item(item_handle, true, true, Some(destination_index), cx); + destination.focus(cx) + }); +} + #[cfg(test)] mod tests { use std::{cell::RefCell, rc::Rc}; @@ -6855,6 +6902,80 @@ mod tests { }); } + fn add_an_item_to_active_pane( + cx: &mut VisualTestContext, + workspace: &View, + item_id: u64, + ) -> View { + let item = cx.new_view(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new( + item_id, + "item{item_id}.txt", + cx, + )]) + }); + workspace.update(cx, |workspace, cx| { + workspace.add_item_to_active_pane(Box::new(item.clone()), None, false, cx); + }); + return item; + } + + fn split_pane(cx: &mut VisualTestContext, workspace: &View) -> View { + return workspace.update(cx, |workspace, cx| { + let new_pane = + workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx); + new_pane + }); + } + + #[gpui::test] + async fn test_join_all_panes(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project, cx)); + + 
add_an_item_to_active_pane(cx, &workspace, 1); + split_pane(cx, &workspace); + add_an_item_to_active_pane(cx, &workspace, 2); + split_pane(cx, &workspace); // empty pane + split_pane(cx, &workspace); + let last_item = add_an_item_to_active_pane(cx, &workspace, 3); + + cx.executor().run_until_parked(); + + workspace.update(cx, |workspace, cx| { + let num_panes = workspace.panes().len(); + let num_items_in_current_pane = workspace.active_pane().read(cx).items().count(); + let active_item = workspace + .active_pane() + .read(cx) + .active_item() + .expect("item is in focus"); + + assert_eq!(num_panes, 4); + assert_eq!(num_items_in_current_pane, 1); + assert_eq!(active_item.item_id(), last_item.item_id()); + }); + + workspace.update(cx, |workspace, cx| { + workspace.join_all_panes(cx); + }); + + workspace.update(cx, |workspace, cx| { + let num_panes = workspace.panes().len(); + let num_items_in_current_pane = workspace.active_pane().read(cx).items().count(); + let active_item = workspace + .active_pane() + .read(cx) + .active_item() + .expect("item is in focus"); + + assert_eq!(num_panes, 1); + assert_eq!(num_items_in_current_pane, 3); + assert_eq!(active_item.item_id(), last_item.item_id()); + }); + } struct TestModal(FocusHandle); impl TestModal { From 13c553c50f38af721cb51d775b898d4fda20ccb0 Mon Sep 17 00:00:00 2001 From: Nero Song Date: Wed, 11 Sep 2024 20:49:12 +0800 Subject: [PATCH 044/762] Doc Fix: Shortcut of "Go forward" in key-bindings.md (#17689) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit "Control + _" => "Control + Shift + _" Doc was edited so quickly using zed that caused a typo 😄 ### shot image ### doc url https://zed.dev/docs/key-bindings Release Notes: - N/A --- docs/src/key-bindings.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 627e3fc6c4d2cc..2a97bc62a89178 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md 
@@ -426,7 +426,7 @@ TBD: Add Column with Linux shortcuts | Close clean items | Pane | `⌘ + K, U` | | Close inactive items | Pane | `Alt + ⌘ + T` | | Go back | Pane | `Control + -` | -| Go forward | Pane | `Control + _` | +| Go forward | Pane | `Control + Shift + _` | | Reopen closed item | Pane | `⌘ + Shift + T` | | Split down | Pane | `⌘ + K, Down` | | Split left | Pane | `⌘ + K, Left` | From b5c42edf1e0f0a42b6f14d24e8b065ef0959b3dd Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 11 Sep 2024 12:56:39 -0400 Subject: [PATCH 045/762] lsp: Fix noisy logs when starting language servers (#17713) We would log every time we'd lookup a language server for a file and we'd also log "starting language server" even though we were about to only download it and not start it. Release Notes: - N/A --- crates/language/src/language_registry.rs | 2 +- crates/lsp/src/lsp.rs | 2 +- crates/project/src/lsp_store.rs | 9 --------- 3 files changed, 2 insertions(+), 11 deletions(-) diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index a65d20019f3da0..dcce78d6cee371 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -833,7 +833,7 @@ impl LanguageRegistry { ) -> Option { let server_id = self.state.write().next_language_server_id(); log::info!( - "starting language server {:?}, path: {root_path:?}, id: {server_id}", + "attempting to start language server {:?}, path: {root_path:?}, id: {server_id}", adapter.name.0 ); diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 061291757512bf..21671cd0b13265 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -272,7 +272,7 @@ impl LanguageServer { }; log::info!( - "starting language server. binary path: {:?}, working directory: {:?}, args: {:?}", + "starting language server process. 
binary path: {:?}, working directory: {:?}, args: {:?}", binary.path, working_dir, &binary.arguments diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 3b6b9ebb0a636d..dbc75389694d8b 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -26,7 +26,6 @@ use gpui::{ Task, WeakModel, }; use http_client::{AsyncBody, Error, HttpClient, Request, Response, Uri}; -use itertools::Itertools; use language::{ language_settings::{ all_language_settings, language_settings, AllLanguageSettings, LanguageSettings, @@ -4489,14 +4488,6 @@ impl LspStore { ); } - log::info!( - "starting language servers for {language}: {adapters}", - adapters = enabled_lsp_adapters - .iter() - .map(|adapter| adapter.name.0.as_ref()) - .join(", ") - ); - for adapter in &enabled_lsp_adapters { self.start_language_server(worktree, adapter.clone(), language.clone(), cx); } From 9407d86ce615fd13589f264e4c595a328effa0f1 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 11 Sep 2024 13:33:42 -0400 Subject: [PATCH 046/762] project: Use login shell to get environment per project (#17717) This is a follow-up to #17075 to spawn a login shell when getting the environment for projects. The reason why we didn't do it before is that we only used the environment for certain language servers and not a lot of other things, like tasks. But with #17075 we now use the project more often and use it as the _base_ environment for tasks/terminals. Before the change, terminals and tasks would inherit the Zed process' environment, including PATH and so on. After the change, we would set the environment, overwriting the PATH instead of merging. But the non-login shell environment is a subset of the login-shell environment. Release Notes: - Fixed environment variables used per project in terminals/tasks overwriting the base environment and not making use of a login-shell environment. 
--- crates/project/src/environment.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index b74b577b3992d3..9742b8b6d58e32 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -219,7 +219,7 @@ async fn load_shell_environment( ); let output = smol::process::Command::new(&shell) - .args(["-i", "-c", &command]) + .args(["-l", "-i", "-c", &command]) .envs(direnv_environment) .output() .await From 3a6a29f1173e3cc1059d2220985a53581864388c Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 11 Sep 2024 16:13:17 -0400 Subject: [PATCH 047/762] vim: Fix inline completions showing up in normal mode (#17727) Booleans are hard. Release Notes: - Fixed inline completions showing up in Vim normal mode. --- crates/editor/src/editor.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f750abd95c76df..e50bf67ab01a28 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -4975,9 +4975,10 @@ impl Editor { let cursor = self.selections.newest_anchor().head(); let (buffer, cursor_buffer_position) = self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; + if !user_requested - && self.enable_inline_completions - && !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx) + && (!self.enable_inline_completions + || !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx)) { self.discard_inline_completion(false, cx); return None; From 25b6e43b0f8faf3b24b4ab4ee02456d3933539ce Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 11 Sep 2024 16:22:10 -0400 Subject: [PATCH 048/762] bump eslint memory usage (#17724) Release Notes: - Increased memory limit for eslint to reduce crashes --- crates/languages/src/typescript.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/typescript.rs 
b/crates/languages/src/typescript.rs index 185c10be44a70f..9b6d41451f72a1 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -58,7 +58,11 @@ fn typescript_server_binary_arguments(server_path: &Path) -> Vec { } fn eslint_server_binary_arguments(server_path: &Path) -> Vec { - vec![server_path.into(), "--stdio".into()] + vec![ + "--max-old-space-size=8192".into(), + server_path.into(), + "--stdio".into(), + ] } pub struct TypeScriptLspAdapter { From 092f29d3944f969eea6175f6b654c7832ecdeceb Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Sep 2024 09:11:19 -0400 Subject: [PATCH 049/762] Use a bigger prefix for numeric sorting (#17752) Release Notes: - Fixed sorting of files with YYYYmmddHHMMSS prefix --- crates/util/src/util.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index 40a5cf6212e443..a161b8bac2b59f 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -644,7 +644,7 @@ impl RangeExt for RangeInclusive { /// This is useful for turning regular alphanumerically sorted sequences as `1-abc, 10, 11-def, .., 2, 21-abc` /// into `1-abc, 2, 10, 11-def, .., 21-abc` #[derive(Debug, PartialEq, Eq)] -pub struct NumericPrefixWithSuffix<'a>(Option, &'a str); +pub struct NumericPrefixWithSuffix<'a>(Option, &'a str); impl<'a> NumericPrefixWithSuffix<'a> { pub fn from_numeric_prefixed_str(str: &'a str) -> Self { From 9db68ee6ae0a5520ec44b9281a1eb0bd48eab9c9 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 12 Sep 2024 09:47:25 -0400 Subject: [PATCH 050/762] lsp: Use project-local settings if available (#17753) Release Notes: - Changed built-in language support (Rust, Go, C, YAML, ...) to lookup language-server specific settings locally in project directory first before falling back to global value. 
--------- Co-authored-by: Bennet --- crates/languages/src/c.rs | 8 ++------ crates/languages/src/go.rs | 8 ++------ crates/languages/src/python.rs | 10 ++++------ crates/languages/src/rust.rs | 8 ++------ crates/languages/src/tailwind.rs | 15 +++++---------- crates/languages/src/typescript.rs | 13 ++++--------- crates/languages/src/vtsls.rs | 24 ++++++------------------ crates/languages/src/yaml.rs | 12 ++++-------- crates/project/src/lsp_store.rs | 19 ++++++++++++++++++- 9 files changed, 47 insertions(+), 70 deletions(-) diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index ea11b4e0d0bc34..243f61b084c4e8 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -5,8 +5,7 @@ use gpui::AsyncAppContext; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; pub use language::*; use lsp::LanguageServerBinary; -use project::project_settings::{BinarySettings, ProjectSettings}; -use settings::Settings; +use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use smol::fs::{self, File}; use std::{any::Any, env::consts, path::PathBuf, sync::Arc}; use util::{fs::remove_matching, maybe, ResultExt}; @@ -29,10 +28,7 @@ impl super::LspAdapter for CLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) - .and_then(|s| s.binary.clone()) + language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index a528f4f70cdc44..55850411ca88db 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -5,10 +5,9 @@ use gpui::{AppContext, AsyncAppContext, Task}; use http_client::github::latest_github_release; pub use language::*; use lsp::LanguageServerBinary; -use project::project_settings::{BinarySettings, ProjectSettings}; +use 
project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use regex::Regex; use serde_json::json; -use settings::Settings; use smol::{fs, process}; use std::{ any::Any, @@ -71,10 +70,7 @@ impl super::LspAdapter for GoLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) - .and_then(|s| s.binary.clone()) + language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index f9baed63fb66c0..a0005e6f97878c 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -5,9 +5,9 @@ use gpui::AsyncAppContext; use language::{ContextProvider, LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; -use project::project_settings::ProjectSettings; +use project::lsp_store::language_server_settings; use serde_json::Value; -use settings::Settings; + use std::{ any::Any, borrow::Cow, @@ -177,13 +177,11 @@ impl LspAdapter for PythonLspAdapter { async fn workspace_configuration( self: Arc, - _: &Arc, + adapter: &Arc, cx: &mut AsyncAppContext, ) -> Result { cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(adapter.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() }) diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 46b6ce475d5283..a32b4f55da1c81 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -7,9 +7,8 @@ use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; pub use language::*; use language_settings::all_language_settings; use lsp::LanguageServerBinary; -use project::project_settings::{BinarySettings, ProjectSettings}; +use project::{lsp_store::language_server_settings, 
project_settings::BinarySettings}; use regex::Regex; -use settings::Settings; use smol::fs::{self, File}; use std::{ any::Any, @@ -40,10 +39,7 @@ impl LspAdapter for RustLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) - .and_then(|s| s.binary.clone()) + language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 39ccc8afa10a94..524e4ce84611bc 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -6,9 +6,8 @@ use gpui::AsyncAppContext; use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; -use project::project_settings::ProjectSettings; +use project::lsp_store::language_server_settings; use serde_json::{json, Value}; -use settings::Settings; use smol::fs; use std::{ any::Any, @@ -53,14 +52,12 @@ impl LspAdapter for TailwindLspAdapter { async fn check_if_user_installed( &self, - _delegate: &dyn LspAdapterDelegate, + delegate: &dyn LspAdapterDelegate, cx: &AsyncAppContext, ) -> Option { let configured_binary = cx .update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate, Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -171,13 +168,11 @@ impl LspAdapter for TailwindLspAdapter { async fn workspace_configuration( self: Arc, - _: &Arc, + delegate: &Arc, cx: &mut AsyncAppContext, ) -> Result { let tailwind_user_settings = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 
9b6d41451f72a1..14b6303f5cf906 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -8,10 +8,9 @@ use http_client::github::{build_asset_url, AssetKind, GitHubLspBinaryVersion}; use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; -use project::project_settings::ProjectSettings; +use project::lsp_store::language_server_settings; use project::ContextProviderWithTasks; use serde_json::{json, Value}; -use settings::Settings; use smol::{fs, io::BufReader, stream::StreamExt}; use std::{ any::Any, @@ -236,13 +235,11 @@ impl LspAdapter for TypeScriptLspAdapter { async fn workspace_configuration( self: Arc, - _: &Arc, + delegate: &Arc, cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.initialization_options.clone()) })?; if let Some(options) = override_options { @@ -334,9 +331,7 @@ impl LspAdapter for EsLintLspAdapter { let workspace_root = delegate.worktree_root_path(); let eslint_user_settings = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 3dca82688cb81d..744405642d59cd 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -5,9 +5,8 @@ use gpui::AsyncAppContext; use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; -use project::project_settings::{BinarySettings, ProjectSettings}; +use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use serde_json::{json, Value}; 
-use settings::{Settings, SettingsLocation}; use std::{ any::Any, ffi::OsString, @@ -75,10 +74,7 @@ impl LspAdapter for VtslsLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(SERVER_NAME) - .and_then(|s| s.binary.clone()) + language_server_settings(delegate, SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { @@ -270,26 +266,18 @@ impl LspAdapter for VtslsLspAdapter { async fn workspace_configuration( self: Arc, - adapter: &Arc, + delegate: &Arc, cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - ProjectSettings::get( - Some(SettingsLocation { - worktree_id: adapter.worktree_id(), - path: adapter.worktree_root_path(), - }), - cx, - ) - .lsp - .get(SERVER_NAME) - .and_then(|s| s.initialization_options.clone()) + language_server_settings(delegate.as_ref(), SERVER_NAME, cx) + .and_then(|s| s.initialization_options.clone()) })?; if let Some(options) = override_options { return Ok(options); } let mut initialization_options = self - .initialization_options(adapter) + .initialization_options(delegate) .await .map(|o| o.unwrap())?; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 51a9913b249e9f..b75b3c722667df 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -7,7 +7,7 @@ use language::{ }; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; -use project::project_settings::ProjectSettings; +use project::lsp_store::language_server_settings; use serde_json::Value; use settings::{Settings, SettingsLocation}; use smol::fs; @@ -44,14 +44,12 @@ impl LspAdapter for YamlLspAdapter { async fn check_if_user_installed( &self, - _delegate: &dyn LspAdapterDelegate, + delegate: &dyn LspAdapterDelegate, cx: &AsyncAppContext, ) -> Option { let configured_binary = cx .update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + 
language_server_settings(delegate, Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -147,9 +145,7 @@ impl LspAdapter for YamlLspAdapter { let mut options = serde_json::json!({"[yaml]": {"editor.tabSize": tab_size}}); let project_options = cx.update(|cx| { - ProjectSettings::get_global(cx) - .lsp - .get(Self::SERVER_NAME) + language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) .and_then(|s| s.initialization_options.clone()) })?; if let Some(override_options) = project_options { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index dbc75389694d8b..307e86de450b7f 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -3,7 +3,7 @@ use crate::{ environment::ProjectEnvironment, lsp_command::{self, *}, lsp_ext_command, - project_settings::ProjectSettings, + project_settings::{LspSettings, ProjectSettings}, relativize_path, resolve_path, worktree_store::{WorktreeStore, WorktreeStoreEvent}, yarn::YarnPathStore, @@ -7035,6 +7035,23 @@ impl HttpClient for BlockedHttpClient { None } } + +pub fn language_server_settings<'a, 'b: 'a>( + delegate: &'a dyn LspAdapterDelegate, + language: &str, + cx: &'b AppContext, +) -> Option<&'a LspSettings> { + ProjectSettings::get( + Some(SettingsLocation { + worktree_id: delegate.worktree_id(), + path: delegate.worktree_root_path(), + }), + cx, + ) + .lsp + .get(language) +} + #[async_trait] impl LspAdapterDelegate for ProjectLspAdapterDelegate { fn show_notification(&self, message: &str, cx: &mut AppContext) { From 02d5f320ad621e92cafaa7075f61e6496e12cd98 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 12 Sep 2024 10:02:45 -0400 Subject: [PATCH 051/762] lsp: Fix initialization_options being used as workspace configuration (#17757) Release Notes: - Fixed user-configured `initialization_options` being passed as `workspace/Configuration` for the vtsls, TypeScript, and YAML language servers. 
Co-authored-by: Bennet --- crates/languages/src/typescript.rs | 2 +- crates/languages/src/vtsls.rs | 17 +++-------------- crates/languages/src/yaml.rs | 2 +- 3 files changed, 5 insertions(+), 16 deletions(-) diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 14b6303f5cf906..cc52df2922a210 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -240,7 +240,7 @@ impl LspAdapter for TypeScriptLspAdapter { ) -> Result { let override_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) - .and_then(|s| s.initialization_options.clone()) + .and_then(|s| s.settings.clone()) })?; if let Some(options) = override_options { return Ok(options); diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 744405642d59cd..f3b46d26850ef3 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -13,7 +13,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::{maybe, merge_json_value_into, ResultExt}; +use util::{maybe, ResultExt}; fn typescript_server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -271,20 +271,9 @@ impl LspAdapter for VtslsLspAdapter { ) -> Result { let override_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), SERVER_NAME, cx) - .and_then(|s| s.initialization_options.clone()) + .and_then(|s| s.settings.clone()) })?; - if let Some(options) = override_options { - return Ok(options); - } - let mut initialization_options = self - .initialization_options(delegate) - .await - .map(|o| o.unwrap())?; - - if let Some(override_options) = override_options { - merge_json_value_into(override_options, &mut initialization_options) - } - Ok(initialization_options) + Ok(override_options.unwrap_or_default()) } fn language_ids(&self) -> HashMap { diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 
b75b3c722667df..06360847acc803 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -146,7 +146,7 @@ impl LspAdapter for YamlLspAdapter { let project_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) - .and_then(|s| s.initialization_options.clone()) + .and_then(|s| s.settings.clone()) })?; if let Some(override_options) = project_options { merge_json_value_into(override_options, &mut options); From b341079d8a0bd8293f0fb0c479a2af5f47476f6e Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Sep 2024 11:23:37 -0400 Subject: [PATCH 052/762] Fix maxTsServerMemory (#17758) Release Notes: - N/A --------- Co-authored-by: Thorsten Ball --- crates/languages/src/vtsls.rs | 19 +++++++++++++++---- docs/src/languages/typescript.md | 2 +- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index f3b46d26850ef3..9499b5c54fbeba 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -222,9 +222,6 @@ impl LspAdapter for VtslsLspAdapter { "suggest": { "completeFunctionCalls": true }, - "tsserver": { - "maxTsServerMemory": 8092 - }, "inlayHints": { "parameterNames": { "enabled": "all", @@ -273,7 +270,21 @@ impl LspAdapter for VtslsLspAdapter { language_server_settings(delegate.as_ref(), SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; - Ok(override_options.unwrap_or_default()) + + if let Some(options) = override_options { + return Ok(options); + } + + let config = serde_json::json!({ + "tsserver": { + "maxTsServerMemory": 8092 + }, + }); + + Ok(serde_json::json!({ + "typescript": config, + "javascript": config + })) } fn language_ids(&self) -> HashMap { diff --git a/docs/src/languages/typescript.md b/docs/src/languages/typescript.md index 12529500d81782..bfe63c5b2c1a6c 100644 --- a/docs/src/languages/typescript.md +++ b/docs/src/languages/typescript.md @@ -52,7 +52,7 @@ Prettier will also be used for 
TypeScript files by default. To disable this: { "lsp": { "vtsls": { - "initialization_options": { + "settings": { // For TypeScript: "typescript": { "tsserver": { "maxTsServerMemory": 16184 } }, // For JavaScript: From 0043b0d9579fbc17e1362964a4f3922ebe1605eb Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 12 Sep 2024 11:32:24 -0400 Subject: [PATCH 053/762] editor: Render documentation popovers using UI font (#17761) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR updates the documentation popovers to render non-code using the UI font: Screenshot 2024-09-12 at 11 10 46 AM Screenshot 2024-09-12 at 11 21 12 AM Requested by @davidbarsky. Release Notes: - Changed documentation popovers to render Markdown prose using the UI font instead of the buffer font. Code blocks still using the buffer font. --- crates/editor/src/hover_popover.rs | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 0dc2f098394313..adbb5899ff74d0 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -518,19 +518,22 @@ async fn parse_blocks( let rendered_block = cx .new_view(|cx| { let settings = ThemeSettings::get_global(cx); + let ui_font_family = settings.ui_font.family.clone(); let buffer_font_family = settings.buffer_font.family.clone(); - let mut base_style = cx.text_style(); - base_style.refine(&TextStyleRefinement { - font_family: Some(buffer_font_family.clone()), + + let mut base_text_style = cx.text_style(); + base_text_style.refine(&TextStyleRefinement { + font_family: Some(ui_font_family.clone()), color: Some(cx.theme().colors().editor_foreground), ..Default::default() }); let markdown_style = MarkdownStyle { - base_text_style: base_style, - code_block: StyleRefinement::default().mt(rems(1.)).mb(rems(1.)), + base_text_style, + code_block: 
StyleRefinement::default().my(rems(1.)).font_buffer(cx), inline_code: TextStyleRefinement { background_color: Some(cx.theme().colors().background), + font_family: Some(buffer_font_family), ..Default::default() }, rule_color: Color::Muted.color(cx), From f39c175bd3a45a508b370e80492b8539abfa41e2 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 11:52:59 -0400 Subject: [PATCH 054/762] Update Rust crate serde_json_lenient to 0.2 (#17732) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [serde_json_lenient](https://redirect.github.com/google/serde_json_lenient) | workspace.dependencies | minor | `0.1` -> `0.2` | --- ### Release Notes
google/serde_json_lenient (serde_json_lenient) ### [`v0.2.1`](https://redirect.github.com/google/serde_json_lenient/releases/tag/v0.2.1) [Compare Source](https://redirect.github.com/google/serde_json_lenient/compare/v0.2.0...v0.2.1) - Fixed newline handling ([#​20](https://redirect.github.com/google/serde_json_lenient/issues/20)) ### [`v0.2.0`](https://redirect.github.com/google/serde_json_lenient/releases/tag/v0.2.0) [Compare Source](https://redirect.github.com/google/serde_json_lenient/compare/v0.1.8...v0.2.0) - Merged from upstream `serde_json` - Introduce cfg!(parse_negative_zero_as_int) to create a build-time option to parse -0 the same as other JSON parsers. ([#​16](https://redirect.github.com/google/serde_json_lenient/issues/16)) - Add an option to control escaped newlines separately from other control ([#​18](https://redirect.github.com/google/serde_json_lenient/issues/18))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2027c5421f5dda..36691e72ab772a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9928,9 +9928,9 @@ dependencies = [ [[package]] name = "serde_json_lenient" -version = "0.1.8" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc61c66b53a4035fcce237ef38043f4b2f0ebf918fd0e69541a5166104065581" +checksum = "a5d0bae483150302560d7cb52e7932f39b69a6fbdd099e48d33ef060a8c9c078" dependencies = [ "indexmap 2.4.0", "itoa", diff --git a/Cargo.toml b/Cargo.toml index ea8284ccb12b0c..23b17fd2916957 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -393,7 +393,7 @@ semver = "1.0" serde = { version = "1.0", features = ["derive", "rc"] } serde_derive = { version = "1.0", features = ["deserialize_in_place"] } serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] } -serde_json_lenient = { version = "0.1", features = [ +serde_json_lenient = { version = "0.2", features = [ "preserve_order", "raw_value", ] } From 6841f7b9d792f6f89ca1b1bcbfca9dc763b66531 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 11:53:31 -0400 Subject: [PATCH 055/762] Update Python to v3.12.6 (#17728) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 
This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [python](https://redirect.github.com/containerbase/python-prebuild) | dependencies | patch | `3.12.5` -> `3.12.6` | --- ### Release Notes
containerbase/python-prebuild (python) ### [`v3.12.6`](https://redirect.github.com/containerbase/python-prebuild/releases/tag/3.12.6) [Compare Source](https://redirect.github.com/containerbase/python-prebuild/compare/3.12.5...3.12.6) ##### Bug Fixes - **deps:** update dependency python to v3.12.6
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- script/update_top_ranking_issues/poetry.lock | 4 ++-- script/update_top_ranking_issues/pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/script/update_top_ranking_issues/poetry.lock b/script/update_top_ranking_issues/poetry.lock index ae2120c741cd9e..a85844e645f100 100644 --- a/script/update_top_ranking_issues/poetry.lock +++ b/script/update_top_ranking_issues/poetry.lock @@ -529,5 +529,5 @@ files = [ [metadata] lock-version = "2.0" -python-versions = "3.12.5" -content-hash = "3e6aa4dc758eb933f7e2d1a305d1e397b13a960ac4846ef54c5a11b906b77015" +python-versions = "3.12.6" +content-hash = "7827704e06a8c195297507e0d05e7a7c3843ed299bd353f31570ee4c435c6896" diff --git a/script/update_top_ranking_issues/pyproject.toml b/script/update_top_ranking_issues/pyproject.toml index cb92ce9c536323..15d8346bb99103 100644 --- a/script/update_top_ranking_issues/pyproject.toml +++ b/script/update_top_ranking_issues/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" [tool.poetry.dependencies] mypy = "1.6.0" PyGithub = "1.55" -python = "3.12.5" +python = "3.12.6" pytz = "2022.1" typer = "0.9.0" types-pytz = "2023.3.1.1" From bba380e41ac9d09b36f63abdaa13477e727c1d3a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 12 Sep 2024 14:06:38 -0400 Subject: [PATCH 056/762] docs: Add copywriting tweaks to the Vim page (#17766) Quick writing 
refinements as we displayed these docs over at RustConf. Namely: - Removal of "here" links - Making link anchors generally bigger - Adding commas where suitable - Capitalizing "Vim" (although "vim mode" is still lowercased) --- Release Notes: - N/A --- docs/src/vim.md | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/docs/src/vim.md b/docs/src/vim.md index d4e41b58199570..777534813f9657 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -12,7 +12,7 @@ So, Zed's vim mode does not replicate Vim one-to-one, but it meshes Vim's modal ## Enabling and disabling vim mode -When you first open Zed, a checkbox will appear on the welcome screen, allowing you to enable vim mode. +When you first open Zed, you'll see a checkbox on the welcome screen that allows you to enable vim mode. If you missed this, you can toggle vim mode on or off anytime by opening the command palette and using the workspace command `toggle vim mode`. @@ -83,7 +83,7 @@ ctrl-x ctrl-z Hides all suggestions :Ext[ensions] Open the extensions window ``` -Vim mode uses Zed to define concepts like "brackets" (for the `%` key) and "words" (for motions like `w` and `e`). This does lead to some differences, but they are mostly positive. For example `%` considers `|` to be a bracket in languages like Rust; and `w` considers `$` to be a word-character in languages like Javascript. +Vim mode uses Zed to define concepts like "brackets" (for the `%` key) and "words" (for motions like `w` and `e`). This does lead to some differences, but they are mostly positive. For example `%` considers `|` to be a bracket in languages like Rust; and `w` considers `$` to be a word-character in languages like JavaScript. Vim mode emulates visual block mode using Zed's multiple cursor support. This again leads to some differences, but is much more powerful. 
@@ -129,11 +129,13 @@ For vim-specific shortcuts, you may find the following template a good place to If you would like to emulate vim's `map` (`nmap` etc.) commands you can bind to the [`workspace::SendKeystrokes`](./key-bindings.md#remapping-keys) action in the correct context. -You can see the bindings that are enabled by default in vim mode [here](https://github.com/zed-industries/zed/blob/main/assets/keymaps/vim.json). +Check out the [bindings that are enabled by default in vim mode](https://github.com/zed-industries/zed/blob/main/assets/keymaps/vim.json). ### Contexts -Zed's keyboard bindings are evaluated only when the `"context"` matches the location you are in on the screen. Locations are nested, so when you're editing you're in the `"Workspace"` location is at the top, containing a `"Pane"` which contains an `"Editor"`. Contexts are matched only on one level at a time. So it is possible to combine `Editor && vim_mode == normal`, but `Workspace && vim_mode == normal` will never match because we set the vim context at the `Editor` level. +Zed's keyboard bindings are evaluated only when the `"context"` matches the location you are in on the screen. Locations are nested, so when you're editing, you're in the `"Workspace"` location, which is at the top, containing a `"Pane"` that contains an `"Editor"`. + +Contexts are matched only on one level at a time. So, it is possible to combine `Editor && vim_mode == normal`, but `Workspace && vim_mode == normal` will never match because we set the vim context at the `Editor` level. Vim mode adds several contexts to the `Editor`: @@ -164,13 +166,13 @@ If you're using vim mode on Linux or Windows, you may find it overrides keybindi Vim mode allows you to enable Zed’s command palette with `:`. This means that you can use vim's command palette to run any action that Zed supports. -Additionally, vim mode contains a number of aliases for popular vim commands to ensure that muscle memory works. 
For example `:w` will save the file. +Additionally, vim mode contains a number of aliases for popular Vim commands to ensure that muscle memory works. For example, `:w` will save the file. -We do not (yet) emulate the full power of vim’s command line, in particular, we do not support arguments to commands yet. Please reach out on [GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. +We do not (yet) emulate the full power of Vim’s command line, in particular, we do not support arguments to commands yet. Please [file issues on GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. As mentioned above, one thing to be aware of is that the regex engine is slightly different from vim's in `:%s/a/b`. -Currently supported vim-specific commands: +Currently supported Vim-specific commands: ``` # window management @@ -296,7 +298,7 @@ There are also a few Zed settings that you may also enjoy if you use vim mode: } ``` -If you want to navigate between the editor and docks (terminal, project panel, AI assistant, ...) just like you navigate between splits you can use the following key bindings: +If you want to navigate between the editor and docks (terminal, project panel, AI assistant panel, etc...), just like you navigate between splits, you can use the following key bindings: ```json { @@ -366,4 +368,4 @@ Notably: To help with the transition, the command palette will fix parentheses and replace groups for you when you run `:%s//`. So `%s:/\(a\)(b)/\1/` will be converted into a search for "(a)\(b\)" and a replacement of "$1". -For the full syntax supported by Zed's regex engine see the [regex crate documentation](https://docs.rs/regex/latest/regex/#syntax). +For the full syntax supported by Zed's regex engine [see the regex crate documentation](https://docs.rs/regex/latest/regex/#syntax). 
From 3b37db4140349d4120a4724afa9c41c429efd49b Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 12 Sep 2024 14:17:59 -0400 Subject: [PATCH 057/762] Improve button copy on database load error toast (#17767) Minimal copywriting improvement as that just happened to me while working on Zed during ReactConf. Release Notes: - N/A --- crates/workspace/src/workspace.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index e6358cfdb95595..0d774277944c02 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -4644,7 +4644,7 @@ fn notify_if_database_failed(workspace: WindowHandle, cx: &mut AsyncA |cx| { cx.new_view(|_| { MessageNotification::new("Failed to load the database file.") - .with_click_message("Click to let us know about this error") + .with_click_message("File an issue") .on_click(|cx| cx.open_url(REPORT_ISSUE_URL)) }) }, From 4d26f83d23165d841ebe95f3c1123616b30709ce Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Sep 2024 14:46:08 -0400 Subject: [PATCH 058/762] Revert "settings: Remove auxiliary Content types where possible (#16744)" (#17768) This breaks setting `{"scrollbar": {"show":"never"}}` Release Notes: - N/A --- crates/auto_update/src/auto_update.rs | 23 +- crates/call/src/call_settings.rs | 22 +- crates/client/src/client.rs | 50 +-- crates/collab/src/tests/editor_tests.rs | 8 +- crates/collab/src/tests/following_tests.rs | 2 +- crates/collab_ui/src/chat_panel.rs | 2 +- .../src/chat_panel/message_editor.rs | 8 +- crates/collab_ui/src/collab_panel.rs | 2 +- crates/collab_ui/src/notification_panel.rs | 2 +- crates/collab_ui/src/panel_settings.rs | 74 ++-- .../src/project_diagnostics_settings.rs | 20 +- crates/editor/src/editor.rs | 4 +- crates/editor/src/editor_settings.rs | 318 +++++++++++------- crates/editor/src/editor_settings_controls.rs | 22 +- 
crates/editor/src/editor_tests.rs | 18 +- crates/editor/src/element.rs | 17 +- crates/extension/src/extension_settings.rs | 13 +- crates/extensions_ui/src/extensions_ui.rs | 2 +- crates/go_to_line/src/cursor_position.rs | 16 +- crates/gpui/src/geometry.rs | 2 - crates/language/src/language_settings.rs | 8 +- crates/languages/src/json.rs | 29 +- crates/outline_panel/src/outline_panel.rs | 16 +- .../src/outline_panel_settings.rs | 72 ++-- crates/performance/src/performance.rs | 184 ---------- crates/project/src/project_settings.rs | 41 ++- crates/project_panel/src/project_panel.rs | 28 +- .../src/project_panel_settings.rs | 92 +++-- crates/recent_projects/src/dev_servers.rs | 3 +- crates/recent_projects/src/ssh_connections.rs | 25 +- crates/repl/src/jupyter_settings.rs | 28 +- crates/tasks_ui/src/settings.rs | 18 +- crates/vim/src/digraph.rs | 2 +- crates/vim/src/normal.rs | 6 +- crates/vim/src/normal/paste.rs | 12 +- crates/vim/src/normal/scroll.rs | 2 +- crates/vim/src/normal/search.rs | 4 +- crates/vim/src/test.rs | 2 +- crates/vim/src/test/vim_test_context.rs | 6 +- crates/vim/src/vim.rs | 32 +- crates/welcome/src/base_keymap_picker.rs | 2 +- crates/welcome/src/base_keymap_setting.rs | 6 +- crates/welcome/src/welcome.rs | 2 +- crates/workspace/src/item.rs | 70 ++-- crates/workspace/src/workspace.rs | 8 +- crates/workspace/src/workspace_settings.rs | 132 ++++---- crates/worktree/src/worktree_settings.rs | 43 +-- crates/worktree/src/worktree_tests.rs | 11 +- crates/zed/src/zed.rs | 2 +- 49 files changed, 682 insertions(+), 829 deletions(-) delete mode 100644 crates/performance/src/performance.rs diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 499df7fc298594..8063ff4c40fca3 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -116,30 +116,27 @@ impl Drop for MacOsUnmounter { } } -/// Whether or not to automatically check for updates. 
-#[derive(Clone, Copy, JsonSchema, Deserialize, Serialize)] -#[serde(default)] -#[serde(transparent)] struct AutoUpdateSetting(bool); -impl Default for AutoUpdateSetting { - fn default() -> Self { - Self(true) - } -} +/// Whether or not to automatically check for updates. +/// +/// Default: true +#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)] +#[serde(transparent)] +struct AutoUpdateSettingContent(bool); impl Settings for AutoUpdateSetting { const KEY: Option<&'static str> = Some("auto_update"); - type FileContent = Self; + type FileContent = Option; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { let auto_update = [sources.release_channel, sources.user] .into_iter() - .find_map(|value| value.copied()) - .unwrap_or(*sources.default); + .find_map(|value| value.copied().flatten()) + .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); - Ok(auto_update) + Ok(Self(auto_update.0)) } } diff --git a/crates/call/src/call_settings.rs b/crates/call/src/call_settings.rs index e10b711734bb6d..446178ffb982d0 100644 --- a/crates/call/src/call_settings.rs +++ b/crates/call/src/call_settings.rs @@ -4,20 +4,30 @@ use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -/// Configuration of voice calls in Zed. -#[derive(Clone, Debug, Default, Deserialize, Serialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize, Debug)] pub struct CallSettings { - /// Whether the microphone should be muted when joining a channel or a call. pub mute_on_join: bool, - /// Whether your current project should be shared when joining an empty channel. pub share_on_join: bool, } +/// Configuration of voice calls in Zed. +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct CallSettingsContent { + /// Whether the microphone should be muted when joining a channel or a call. 
+ /// + /// Default: false + pub mute_on_join: Option, + + /// Whether your current project should be shared when joining an empty channel. + /// + /// Default: true + pub share_on_join: Option, +} + impl Settings for CallSettings { const KEY: Option<&'static str> = Some("calls"); - type FileContent = Self; + type FileContent = CallSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 83eef45be802bb..8787e2ed9675fe 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -99,26 +99,20 @@ pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(20); actions!(client, [SignIn, SignOut, Reconnect]); -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] -pub struct ClientSettings { - /// The server to connect to. If the environment variable - /// ZED_SERVER_URL is set, it will override this setting. - pub server_url: String, +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct ClientSettingsContent { + server_url: Option, } -impl Default for ClientSettings { - fn default() -> Self { - Self { - server_url: "https://zed.dev".to_owned(), - } - } +#[derive(Deserialize)] +pub struct ClientSettings { + pub server_url: String, } impl Settings for ClientSettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = ClientSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { let mut result = sources.json_merge::()?; @@ -130,37 +124,19 @@ impl Settings for ClientSettings { } #[derive(Default, Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] -pub struct ProxySettings { - /// Set a proxy to use. The proxy protocol is specified by the URI scheme. - /// - /// Supported URI scheme: `http`, `https`, `socks4`, `socks4a`, `socks5`, - /// `socks5h`. `http` will be used when no scheme is specified. 
- /// - /// By default no proxy will be used, or Zed will try get proxy settings from - /// environment variables. - /// - /// Examples: - /// - "proxy": "socks5://localhost:10808" - /// - "proxy": "http://127.0.0.1:10809" - #[schemars(example = "Self::example_1")] - #[schemars(example = "Self::example_2")] - pub proxy: Option, +pub struct ProxySettingsContent { + proxy: Option, } -impl ProxySettings { - fn example_1() -> String { - "http://127.0.0.1:10809".to_owned() - } - fn example_2() -> String { - "socks5://localhost:10808".to_owned() - } +#[derive(Deserialize, Default)] +pub struct ProxySettings { + pub proxy: Option, } impl Settings for ProxySettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = ProxySettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { Ok(Self { diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index a214291752a5ef..3f205b7f937c96 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -2261,11 +2261,11 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA cx_a.update(editor::init); cx_b.update(editor::init); // Turn inline-blame-off by default so no state is transferred without us explicitly doing so - let inline_blame_off_settings = InlineBlameSettings { + let inline_blame_off_settings = Some(InlineBlameSettings { enabled: false, - delay_ms: 0, - min_column: 0, - }; + delay_ms: None, + min_column: None, + }); cx_a.update(|cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings::(cx, |settings| { diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index 1bc3cd691778d1..e66b66a1b45893 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -1649,7 +1649,7 @@ async fn test_following_into_excluded_file( cx.update(|cx| { 
cx.update_global::(|store, cx| { store.update_user_settings::(cx, |settings| { - settings.file_scan_exclusions = vec!["**/.git".to_string()]; + settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]); }); }); }); diff --git a/crates/collab_ui/src/chat_panel.rs b/crates/collab_ui/src/chat_panel.rs index f6e6c7321ff80d..5a79f364ff571f 100644 --- a/crates/collab_ui/src/chat_panel.rs +++ b/crates/collab_ui/src/chat_panel.rs @@ -1108,7 +1108,7 @@ impl Panel for ChatPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = position, + move |settings, _| settings.dock = Some(position), ); } diff --git a/crates/collab_ui/src/chat_panel/message_editor.rs b/crates/collab_ui/src/chat_panel/message_editor.rs index 0b1a2dbe69ec0b..028e148cbac039 100644 --- a/crates/collab_ui/src/chat_panel/message_editor.rs +++ b/crates/collab_ui/src/chat_panel/message_editor.rs @@ -113,7 +113,9 @@ impl MessageEditor { editor.set_show_indent_guides(false, cx); editor.set_completion_provider(Box::new(MessageEditorCompletionProvider(this))); editor.set_auto_replace_emoji_shortcode( - MessageEditorSettings::get_global(cx).auto_replace_emoji_shortcode, + MessageEditorSettings::get_global(cx) + .auto_replace_emoji_shortcode + .unwrap_or_default(), ); }); @@ -128,7 +130,9 @@ impl MessageEditor { cx.observe_global::(|view, cx| { view.editor.update(cx, |editor, cx| { editor.set_auto_replace_emoji_shortcode( - MessageEditorSettings::get_global(cx).auto_replace_emoji_shortcode, + MessageEditorSettings::get_global(cx) + .auto_replace_emoji_shortcode + .unwrap_or_default(), ) }) }) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 3e6483c42dd726..72701101816995 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2813,7 +2813,7 @@ impl Panel for CollabPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = position, + move 
|settings, _| settings.dock = Some(position), ); } diff --git a/crates/collab_ui/src/notification_panel.rs b/crates/collab_ui/src/notification_panel.rs index 326e1f0f5bdc2b..33ca5a2952180c 100644 --- a/crates/collab_ui/src/notification_panel.rs +++ b/crates/collab_ui/src/notification_panel.rs @@ -672,7 +672,7 @@ impl Panel for NotificationPanel { settings::update_settings_file::( self.fs.clone(), cx, - move |settings, _| settings.dock = position, + move |settings, _| settings.dock = Some(position), ); } diff --git a/crates/collab_ui/src/panel_settings.rs b/crates/collab_ui/src/panel_settings.rs index a594f023bbcd3e..f9851d5797306c 100644 --- a/crates/collab_ui/src/panel_settings.rs +++ b/crates/collab_ui/src/panel_settings.rs @@ -2,84 +2,58 @@ use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -use ui::px; use workspace::dock::DockPosition; -#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] -#[serde(default)] +#[derive(Deserialize, Debug)] pub struct CollaborationPanelSettings { - /// Whether to show the panel button in the status bar. pub button: bool, - /// Where to dock the panel. pub dock: DockPosition, - /// Default width of the panel in pixels. pub default_width: Pixels, } -impl Default for CollaborationPanelSettings { - fn default() -> Self { - Self { - button: true, - dock: DockPosition::Left, - default_width: px(240.), - } - } -} - -#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] -#[serde(default)] +#[derive(Deserialize, Debug)] pub struct ChatPanelSettings { - /// Whether to show the panel button in the status bar. pub button: bool, - /// Where to dock the panel. pub dock: DockPosition, - /// Default width of the panel in pixels. 
pub default_width: Pixels, } -impl Default for ChatPanelSettings { - fn default() -> Self { - Self { - button: true, - dock: DockPosition::Right, - default_width: px(240.), - } - } -} - -#[derive(Clone, Deserialize, Debug, JsonSchema, Serialize)] -#[serde(default)] +#[derive(Deserialize, Debug)] pub struct NotificationPanelSettings { - /// Whether to show the panel button in the status bar. pub button: bool, - /// Where to dock the panel. pub dock: DockPosition, - /// Default width of the panel in pixels. pub default_width: Pixels, } -impl Default for NotificationPanelSettings { - fn default() -> Self { - Self { - button: true, - dock: DockPosition::Right, - default_width: px(380.), - } - } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct PanelSettingsContent { + /// Whether to show the panel button in the status bar. + /// + /// Default: true + pub button: Option, + /// Where to dock the panel. + /// + /// Default: left + pub dock: Option, + /// Default width of the panel in pixels. + /// + /// Default: 240 + pub default_width: Option, } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -#[serde(default)] pub struct MessageEditorSettings { /// Whether to automatically replace emoji shortcodes with emoji characters. /// For example: typing `:wave:` gets replaced with `👋`. 
- pub auto_replace_emoji_shortcode: bool, + /// + /// Default: false + pub auto_replace_emoji_shortcode: Option, } impl Settings for CollaborationPanelSettings { const KEY: Option<&'static str> = Some("collaboration_panel"); - type FileContent = Self; + type FileContent = PanelSettingsContent; fn load( sources: SettingsSources, @@ -92,7 +66,7 @@ impl Settings for CollaborationPanelSettings { impl Settings for ChatPanelSettings { const KEY: Option<&'static str> = Some("chat_panel"); - type FileContent = Self; + type FileContent = PanelSettingsContent; fn load( sources: SettingsSources, @@ -105,7 +79,7 @@ impl Settings for ChatPanelSettings { impl Settings for NotificationPanelSettings { const KEY: Option<&'static str> = Some("notification_panel"); - type FileContent = Self; + type FileContent = PanelSettingsContent; fn load( sources: SettingsSources, @@ -118,7 +92,7 @@ impl Settings for NotificationPanelSettings { impl Settings for MessageEditorSettings { const KEY: Option<&'static str> = Some("message_editor"); - type FileContent = Self; + type FileContent = MessageEditorSettings; fn load( sources: SettingsSources, diff --git a/crates/diagnostics/src/project_diagnostics_settings.rs b/crates/diagnostics/src/project_diagnostics_settings.rs index 34739bcd170adf..55879d0c426e2b 100644 --- a/crates/diagnostics/src/project_diagnostics_settings.rs +++ b/crates/diagnostics/src/project_diagnostics_settings.rs @@ -4,25 +4,23 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] -#[serde(default)] -/// Diagnostics configuration. +#[derive(Deserialize, Debug)] pub struct ProjectDiagnosticsSettings { - /// Whether to show warnings or not by default. pub include_warnings: bool, } -impl Default for ProjectDiagnosticsSettings { - fn default() -> Self { - Self { - include_warnings: true, - } - } +/// Diagnostics configuration. 
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct ProjectDiagnosticsSettingsContent { + /// Whether to show warnings or not by default. + /// + /// Default: true + include_warnings: Option, } impl Settings for ProjectDiagnosticsSettings { const KEY: Option<&'static str> = Some("diagnostics"); - type FileContent = Self; + type FileContent = ProjectDiagnosticsSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index e50bf67ab01a28..4792c6b2cb889d 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10640,7 +10640,7 @@ impl Editor { let fs = workspace.read(cx).app_state().fs.clone(); let current_show = TabBarSettings::get_global(cx).show; update_settings_file::(fs, cx, move |setting, _| { - setting.show = !current_show; + setting.show = Some(!current_show); }); } @@ -12563,7 +12563,7 @@ impl EditorSnapshot { let show_git_gutter = self.show_git_diff_gutter.unwrap_or_else(|| { matches!( ProjectSettings::get_global(cx).git.git_gutter, - GitGutterSetting::TrackedFiles + Some(GitGutterSetting::TrackedFiles) ) }); let gutter_settings = EditorSettings::get_global(cx).gutter; diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 0532fd7bdf880c..2614e4ea303d24 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -3,105 +3,38 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize, Clone)] pub struct EditorSettings { - /// Whether the cursor blinks in the editor. pub cursor_blink: bool, - /// How to highlight the current line in the editor. 
pub current_line_highlight: CurrentLineHighlight, - /// Whether to show the informational hover box when moving the mouse - /// over symbols in the editor. pub hover_popover_enabled: bool, - /// Whether to pop the completions menu while typing in an editor without - /// explicitly requesting it. pub show_completions_on_input: bool, - /// Whether to display inline and alongside documentation for items in the - /// completions menu. pub show_completion_documentation: bool, - /// The debounce delay before re-querying the language server for completion - /// documentation when not included in original completion list. pub completion_documentation_secondary_query_debounce: u64, - /// Whether to use additional LSP queries to format (and amend) the code after - /// every "trigger" symbol input, defined by LSP server capabilities. pub use_on_type_format: bool, - /// Toolbar related settings pub toolbar: Toolbar, - /// Scrollbar related settings pub scrollbar: Scrollbar, - /// Gutter related settings pub gutter: Gutter, - /// Whether the editor will scroll beyond the last line. pub scroll_beyond_last_line: ScrollBeyondLastLine, - /// The number of lines to keep above/below the cursor when auto-scrolling. pub vertical_scroll_margin: f32, - /// Scroll sensitivity multiplier. This multiplier is applied - /// to both the horizontal and vertical delta values while scrolling. pub scroll_sensitivity: f32, - /// Whether the line numbers on editors gutter are relative or not. pub relative_line_numbers: bool, - /// When to populate a new search's query based on the text under the cursor. pub seed_search_query_from_cursor: SeedQuerySetting, pub use_smartcase_search: bool, - /// The key to use for adding multiple cursors pub multi_cursor_modifier: MultiCursorModifier, - /// Hide the values of variables in `private` files, as defined by the - /// private_files setting. 
This only changes the visual representation, - /// the values are still present in the file and can be selected / copied / pasted pub redact_private_values: bool, - - /// How many lines to expand the multibuffer excerpts by default pub expand_excerpt_lines: u32, pub middle_click_paste: bool, - /// What to do when multibuffer is double clicked in some of its excerpts - /// (parts of singleton buffers). #[serde(default)] pub double_click_in_multibuffer: DoubleClickInMultibuffer, - /// Whether the editor search results will loop pub search_wrap: bool, #[serde(default)] pub search: SearchSettings, - /// Show method signatures in the editor, when inside parentheses. pub auto_signature_help: bool, - /// Whether to show the signature help after completion or a bracket pair inserted. - /// If `auto_signature_help` is enabled, this setting will be treated as enabled also. pub show_signature_help_after_edits: bool, - /// Jupyter REPL settings. pub jupyter: Jupyter, } -impl Default for EditorSettings { - fn default() -> Self { - Self { - cursor_blink: true, - current_line_highlight: CurrentLineHighlight::All, - hover_popover_enabled: true, - show_completions_on_input: true, - show_completion_documentation: true, - completion_documentation_secondary_query_debounce: 300, - use_on_type_format: true, - toolbar: Default::default(), - scrollbar: Default::default(), - gutter: Default::default(), - scroll_beyond_last_line: ScrollBeyondLastLine::OnePage, - vertical_scroll_margin: 3., - scroll_sensitivity: 1.0, - relative_line_numbers: false, - seed_search_query_from_cursor: SeedQuerySetting::Always, - multi_cursor_modifier: MultiCursorModifier::Alt, - redact_private_values: false, - expand_excerpt_lines: 3, - double_click_in_multibuffer: DoubleClickInMultibuffer::Select, - search_wrap: true, - auto_signature_help: false, - show_signature_help_after_edits: true, - jupyter: Default::default(), - use_smartcase_search: false, - middle_click_paste: true, - search: 
SearchSettings::default(), - } - } -} #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CurrentLineHighlight { @@ -139,93 +72,48 @@ pub enum DoubleClickInMultibuffer { Open, } -#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[derive(Debug, Clone, Deserialize)] pub struct Jupyter { /// Whether the Jupyter feature is enabled. + /// + /// Default: true pub enabled: bool, } -impl Default for Jupyter { - fn default() -> Self { - Self { enabled: true } - } +#[derive(Default, Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub struct JupyterContent { + /// Whether the Jupyter feature is enabled. + /// + /// Default: true + pub enabled: Option, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -#[serde(default)] pub struct Toolbar { - /// Whether to display breadcrumbs in the editor toolbar. pub breadcrumbs: bool, - /// Whether to display quick action buttons in the editor toolbar. pub quick_actions: bool, - /// Whether to show the selections menu in the editor toolbar pub selections_menu: bool, } -impl Default for Toolbar { - fn default() -> Self { - Self { - breadcrumbs: true, - quick_actions: true, - selections_menu: true, - } - } -} - #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct Scrollbar { - /// When to show the scrollbar in the editor. pub show: ShowScrollbar, - /// Whether to show git diff indicators in the scrollbar. pub git_diff: bool, - /// Whether to show buffer search result indicators in the scrollbar. pub selected_symbol: bool, - /// Whether to show selected symbol occurrences in the scrollbar. pub search_results: bool, - /// Whether to show diagnostic indicators in the scrollbar. pub diagnostics: bool, - /// Whether to show cursor positions in the scrollbar. 
pub cursors: bool, } -impl Default for Scrollbar { - fn default() -> Self { - Self { - show: ShowScrollbar::Auto, - git_diff: true, - selected_symbol: true, - search_results: true, - diagnostics: true, - cursors: true, - } - } -} - -/// Gutter-related settings. #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -#[serde(default)] pub struct Gutter { - /// Whether to show line numbers in the gutter. pub line_numbers: bool, - /// Whether to show code action buttons in the gutter. pub code_actions: bool, - /// Whether to show runnable buttons in the gutter. pub runnables: bool, - /// Whether to show fold buttons in the gutter. pub folds: bool, } -impl Default for Gutter { - fn default() -> Self { - Self { - line_numbers: true, - code_actions: true, - runnables: true, - folds: true, - } - } -} - /// When to show the scrollbar in the editor. /// /// Default: auto @@ -283,6 +171,188 @@ pub struct SearchSettings { pub regex: bool, } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct EditorSettingsContent { + /// Whether the cursor blinks in the editor. + /// + /// Default: true + pub cursor_blink: Option, + /// How to highlight the current line in the editor. + /// + /// Default: all + pub current_line_highlight: Option, + /// Whether to show the informational hover box when moving the mouse + /// over symbols in the editor. + /// + /// Default: true + pub hover_popover_enabled: Option, + + /// Whether to pop the completions menu while typing in an editor without + /// explicitly requesting it. + /// + /// Default: true + pub show_completions_on_input: Option, + /// Whether to display inline and alongside documentation for items in the + /// completions menu. + /// + /// Default: true + pub show_completion_documentation: Option, + /// The debounce delay before re-querying the language server for completion + /// documentation when not included in original completion list. 
+ /// + /// Default: 300 ms + pub completion_documentation_secondary_query_debounce: Option, + /// Whether to use additional LSP queries to format (and amend) the code after + /// every "trigger" symbol input, defined by LSP server capabilities. + /// + /// Default: true + pub use_on_type_format: Option, + /// Toolbar related settings + pub toolbar: Option, + /// Scrollbar related settings + pub scrollbar: Option, + /// Gutter related settings + pub gutter: Option, + /// Whether the editor will scroll beyond the last line. + /// + /// Default: one_page + pub scroll_beyond_last_line: Option, + /// The number of lines to keep above/below the cursor when auto-scrolling. + /// + /// Default: 3. + pub vertical_scroll_margin: Option, + /// Scroll sensitivity multiplier. This multiplier is applied + /// to both the horizontal and vertical delta values while scrolling. + /// + /// Default: 1.0 + pub scroll_sensitivity: Option, + /// Whether the line numbers on editors gutter are relative or not. + /// + /// Default: false + pub relative_line_numbers: Option, + /// When to populate a new search's query based on the text under the cursor. + /// + /// Default: always + pub seed_search_query_from_cursor: Option, + pub use_smartcase_search: Option, + /// The key to use for adding multiple cursors + /// + /// Default: alt + pub multi_cursor_modifier: Option, + /// Hide the values of variables in `private` files, as defined by the + /// private_files setting. 
This only changes the visual representation, + /// the values are still present in the file and can be selected / copied / pasted + /// + /// Default: false + pub redact_private_values: Option, + + /// How many lines to expand the multibuffer excerpts by default + /// + /// Default: 3 + pub expand_excerpt_lines: Option, + + /// Whether to enable middle-click paste on Linux + /// + /// Default: true + pub middle_click_paste: Option, + + /// What to do when multibuffer is double clicked in some of its excerpts + /// (parts of singleton buffers). + /// + /// Default: select + pub double_click_in_multibuffer: Option, + /// Whether the editor search results will loop + /// + /// Default: true + pub search_wrap: Option, + + /// Defaults to use when opening a new buffer and project search items. + /// + /// Default: nothing is enabled + pub search: Option, + + /// Whether to automatically show a signature help pop-up or not. + /// + /// Default: false + pub auto_signature_help: Option, + + /// Whether to show the signature help pop-up after completions or bracket pairs inserted. + /// + /// Default: true + pub show_signature_help_after_edits: Option, + + /// Jupyter REPL settings. + pub jupyter: Option, +} + +// Toolbar related settings +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct ToolbarContent { + /// Whether to display breadcrumbs in the editor toolbar. + /// + /// Default: true + pub breadcrumbs: Option, + /// Whether to display quick action buttons in the editor toolbar. + /// + /// Default: true + pub quick_actions: Option, + + /// Whether to show the selections menu in the editor toolbar + /// + /// Default: true + pub selections_menu: Option, +} + +/// Scrollbar related settings +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] +pub struct ScrollbarContent { + /// When to show the scrollbar in the editor. 
+ /// + /// Default: auto + pub show: Option, + /// Whether to show git diff indicators in the scrollbar. + /// + /// Default: true + pub git_diff: Option, + /// Whether to show buffer search result indicators in the scrollbar. + /// + /// Default: true + pub search_results: Option, + /// Whether to show selected symbol occurrences in the scrollbar. + /// + /// Default: true + pub selected_symbol: Option, + /// Whether to show diagnostic indicators in the scrollbar. + /// + /// Default: true + pub diagnostics: Option, + /// Whether to show cursor positions in the scrollbar. + /// + /// Default: true + pub cursors: Option, +} + +/// Gutter related settings +#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct GutterContent { + /// Whether to show line numbers in the gutter. + /// + /// Default: true + pub line_numbers: Option, + /// Whether to show code action buttons in the gutter. + /// + /// Default: true + pub code_actions: Option, + /// Whether to show runnable buttons in the gutter. + /// + /// Default: true + pub runnables: Option, + /// Whether to show fold buttons in the gutter. 
+ /// + /// Default: true + pub folds: Option, +} + impl EditorSettings { pub fn jupyter_enabled(cx: &AppContext) -> bool { EditorSettings::get_global(cx).jupyter.enabled @@ -292,7 +362,7 @@ impl EditorSettings { impl Settings for EditorSettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = EditorSettingsContent; fn load( sources: SettingsSources, diff --git a/crates/editor/src/editor_settings_controls.rs b/crates/editor/src/editor_settings_controls.rs index 36d471dfa28f4b..bbe1b00324a787 100644 --- a/crates/editor/src/editor_settings_controls.rs +++ b/crates/editor/src/editor_settings_controls.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use gpui::{AppContext, FontFeatures, FontWeight}; -use project::project_settings::ProjectSettings; +use project::project_settings::{InlineBlameSettings, ProjectSettings}; use settings::{EditableSettingControl, Settings}; use theme::{FontFamilyCache, ThemeSettings}; use ui::{ @@ -296,7 +296,14 @@ impl EditableSettingControl for InlineGitBlameControl { value: Self::Value, _cx: &AppContext, ) { - settings.git.inline_blame.enabled = value; + if let Some(inline_blame) = settings.git.inline_blame.as_mut() { + inline_blame.enabled = value; + } else { + settings.git.inline_blame = Some(InlineBlameSettings { + enabled: false, + ..Default::default() + }); + } } } @@ -342,7 +349,14 @@ impl EditableSettingControl for LineNumbersControl { value: Self::Value, _cx: &AppContext, ) { - settings.gutter.line_numbers = value; + if let Some(gutter) = settings.gutter.as_mut() { + gutter.line_numbers = Some(value); + } else { + settings.gutter = Some(crate::editor_settings::GutterContent { + line_numbers: Some(value), + ..Default::default() + }); + } } } @@ -388,7 +402,7 @@ impl EditableSettingControl for RelativeLineNumbersControl { value: Self::Value, _cx: &AppContext, ) { - settings.relative_line_numbers = value; + settings.relative_line_numbers = Some(value); } } diff --git a/crates/editor/src/editor_tests.rs 
b/crates/editor/src/editor_tests.rs index 7d42dc7a85d06e..0b1e0385ded4ad 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6964,7 +6964,7 @@ async fn test_handle_input_for_show_signature_help_auto_signature_help_true( cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = true; + settings.auto_signature_help = Some(true); }); }); }); @@ -7105,8 +7105,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = false; - settings.show_signature_help_after_edits = false; + settings.auto_signature_help = Some(false); + settings.show_signature_help_after_edits = Some(false); }); }); }); @@ -7232,8 +7232,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = false; - settings.show_signature_help_after_edits = true; + settings.auto_signature_help = Some(false); + settings.show_signature_help_after_edits = Some(true); }); }); }); @@ -7274,8 +7274,8 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut gpui: cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = true; - settings.show_signature_help_after_edits = false; + settings.auto_signature_help = Some(true); + settings.show_signature_help_after_edits = Some(false); }); }); }); @@ -7318,7 +7318,7 @@ async fn test_signature_help(cx: &mut gpui::TestAppContext) { cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.auto_signature_help = true; + settings.auto_signature_help = Some(true); }); }); }); @@ -7759,7 +7759,7 @@ 
async fn test_completion(cx: &mut gpui::TestAppContext) { cx.update(|cx| { cx.update_global::(|settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.show_completions_on_input = false; + settings.show_completions_on_input = Some(false); }); }) }); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 1c0a325b7620e5..d4f5c565c27ef0 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1283,7 +1283,10 @@ impl EditorElement { .row, ); - let git_gutter_setting = ProjectSettings::get_global(cx).git.git_gutter; + let git_gutter_setting = ProjectSettings::get_global(cx) + .git + .git_gutter + .unwrap_or_default(); let display_hunks = buffer_snapshot .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) .map(|hunk| diff_hunk_to_display(&hunk, snapshot)) @@ -1363,10 +1366,12 @@ impl EditorElement { }; let padded_line_end = line_end + em_width * INLINE_BLAME_PADDING_EM_WIDTHS; - let min_column_in_pixels = self.column_pixels( - ProjectSettings::get_global(cx).git.inline_blame.min_column as usize, - cx, - ); + let min_column_in_pixels = ProjectSettings::get_global(cx) + .git + .inline_blame + .and_then(|settings| settings.min_column) + .map(|col| self.column_pixels(col as usize, cx)) + .unwrap_or(px(0.)); let min_start = content_origin.x - scroll_pixel_position.x + min_column_in_pixels; cmp::max(padded_line_end, min_start) @@ -3326,7 +3331,7 @@ impl EditorElement { .unwrap_or_else(|| { matches!( ProjectSettings::get_global(cx).git.git_gutter, - GitGutterSetting::TrackedFiles + Some(GitGutterSetting::TrackedFiles) ) }); if show_git_gutter { diff --git a/crates/extension/src/extension_settings.rs b/crates/extension/src/extension_settings.rs index 715dc3ca827f51..a2ab7ac9cca73b 100644 --- a/crates/extension/src/extension_settings.rs +++ b/crates/extension/src/extension_settings.rs @@ -6,25 +6,18 @@ use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; use std::sync::Arc; 
-#[derive(Deserialize, Serialize, Debug, Clone, JsonSchema)] -#[serde(default)] +#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] pub struct ExtensionSettings { /// The extensions that should be automatically installed by Zed. /// /// This is used to make functionality provided by extensions (e.g., language support) /// available out-of-the-box. + #[serde(default)] pub auto_install_extensions: HashMap, bool>, + #[serde(default)] pub auto_update_extensions: HashMap, bool>, } -impl Default for ExtensionSettings { - fn default() -> Self { - Self { - auto_install_extensions: HashMap::from_iter([("html".into(), true)]), - auto_update_extensions: Default::default(), - } - } -} impl ExtensionSettings { /// Returns whether the given extension should be auto-installed. pub fn should_auto_install(&self, extension_id: &str) -> bool { diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index b2d6d7f2831720..f246e3cf4fb830 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -1000,7 +1000,7 @@ impl ExtensionsPage { this.update_settings::( selection, cx, - |setting, value| *setting = VimModeSetting(value), + |setting, value| *setting = Some(value), ); }), )), diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index de3d1dc74d2774..63e0f2b07915eb 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -180,10 +180,18 @@ pub(crate) enum LineIndicatorFormat { Long, } +/// Whether or not to automatically check for updates. 
+/// +/// Values: short, long +/// Default: short +#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)] +#[serde(transparent)] +pub(crate) struct LineIndicatorFormatContent(LineIndicatorFormat); + impl Settings for LineIndicatorFormat { const KEY: Option<&'static str> = Some("line_indicator_format"); - type FileContent = Self; + type FileContent = Option; fn load( sources: SettingsSources, @@ -191,9 +199,9 @@ impl Settings for LineIndicatorFormat { ) -> anyhow::Result { let format = [sources.release_channel, sources.user] .into_iter() - .find_map(|value| value.copied()) - .unwrap_or(*sources.default); + .find_map(|value| value.copied().flatten()) + .unwrap_or(sources.default.ok_or_else(Self::missing_default)?); - Ok(format) + Ok(format.0) } } diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index b2035923605e0c..8de9e6f009d733 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -5,7 +5,6 @@ use core::fmt::Debug; use derive_more::{Add, AddAssign, Div, DivAssign, Mul, Neg, Sub, SubAssign}; use refineable::Refineable; -use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use std::{ cmp::{self, PartialOrd}, @@ -2202,7 +2201,6 @@ impl From for Radians { PartialEq, Serialize, Deserialize, - JsonSchema, )] #[repr(transparent)] pub struct Pixels(pub f32); diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 7a6b758a2554ec..e1fcaaba28b4f1 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -70,10 +70,10 @@ pub struct LanguageSettings { /// The column at which to soft-wrap lines, for buffers where soft-wrap /// is enabled. pub preferred_line_length: u32, - /// Whether to show wrap guides (vertical rulers) in the editor. 
- /// Setting this to true will show a guide at the 'preferred_line_length' value - /// if softwrap is set to 'preferred_line_length', and will show any - /// additional guides as specified by the 'wrap_guides' setting. + // Whether to show wrap guides (vertical rulers) in the editor. + // Setting this to true will show a guide at the 'preferred_line_length' value + // if softwrap is set to 'preferred_line_length', and will show any + // additional guides as specified by the 'wrap_guides' setting. pub show_wrap_guides: bool, /// Character counts at which to show wrap guides (vertical rulers) in the editor. pub wrap_guides: Vec, diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 102eb1ef2f8563..6b5f74c2634b45 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -7,13 +7,10 @@ use feature_flags::FeatureFlagAppExt; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext}; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; -use language::{ - CodeLabel, Language, LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate, -}; +use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; -use rope::Rope; use serde_json::{json, Value}; use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore}; use smol::{ @@ -205,30 +202,6 @@ impl LspAdapter for JsonLspAdapter { }))) } - async fn label_for_completion( - &self, - item: &lsp::CompletionItem, - language: &Arc, - ) -> Option { - let text = if let Some(description) = item - .label_details - .as_ref() - .and_then(|label_details| label_details.description.as_ref()) - { - format!("{} {}", item.label, description) - } else if let Some(detail) = &item.detail { - format!("{} {}", item.label, detail) - } else { - item.label.clone() - }; - let rope = Rope::from(item.label.as_str()); - let runs = 
language.highlight_text(&rope, 0..item.label.len()); - Some(language::CodeLabel { - text, - runs, - filter_range: 0..item.label.len(), - }) - } async fn workspace_configuration( self: Arc, _: &Arc, diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 361607533b84b0..c5f0187c229fa8 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -24,12 +24,12 @@ use editor::{ use file_icons::FileIcons; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use gpui::{ - actions, anchored, deferred, div, impl_actions, uniform_list, Action, AnyElement, AppContext, - AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, EventEmitter, - FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, KeyContext, Model, - MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, SharedString, Stateful, - Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, VisualContext, - WeakView, WindowContext, + actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, + AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, + EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, + KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, + SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, + VisualContext, WeakView, WindowContext, }; use itertools::Itertools; use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; @@ -1938,7 +1938,7 @@ impl OutlinePanel { .child( ListItem::new(item_id) .indent_level(depth) - .indent_step_size(settings.indent_size) + .indent_step_size(px(settings.indent_size)) .selected(is_active) .when_some(icon_element, |list_item, icon_element| { list_item.child(h_flex().child(icon_element)) @@ -3801,7 +3801,7 @@ impl 
Panel for OutlinePanel { DockPosition::Left | DockPosition::Bottom => OutlinePanelDockPosition::Left, DockPosition::Right => OutlinePanelDockPosition::Right, }; - settings.dock = dock; + settings.dock = Some(dock); }, ); } diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index a8e51b96c5c3aa..e19fc3c0084947 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -1,5 +1,4 @@ -use anyhow; -use gpui::{px, Pixels}; +use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; @@ -11,51 +10,66 @@ pub enum OutlinePanelDockPosition { Right, } -#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, JsonSchema)] +#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] pub struct OutlinePanelSettings { - /// Whether to show the outline panel button in the status bar. pub button: bool, - /// Customize default width (in pixels) taken by outline panel pub default_width: Pixels, - /// The position of outline panel pub dock: OutlinePanelDockPosition, - /// Whether to show file icons in the outline panel. pub file_icons: bool, - /// Whether to show folder icons or chevrons for directories in the outline panel. pub folder_icons: bool, - /// Whether to show the git status in the outline panel. pub git_status: bool, + pub indent_size: f32, + pub auto_reveal_entries: bool, + pub auto_fold_dirs: bool, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct OutlinePanelSettingsContent { + /// Whether to show the outline panel button in the status bar. + /// + /// Default: true + pub button: Option, + /// Customize default width (in pixels) taken by outline panel + /// + /// Default: 240 + pub default_width: Option, + /// The position of outline panel + /// + /// Default: left + pub dock: Option, + /// Whether to show file icons in the outline panel. 
+ /// + /// Default: true + pub file_icons: Option, + /// Whether to show folder icons or chevrons for directories in the outline panel. + /// + /// Default: true + pub folder_icons: Option, + /// Whether to show the git status in the outline panel. + /// + /// Default: true + pub git_status: Option, /// Amount of indentation (in pixels) for nested items. - pub indent_size: Pixels, + /// + /// Default: 20 + pub indent_size: Option, /// Whether to reveal it in the outline panel automatically, /// when a corresponding project entry becomes active. /// Gitignored entries are never auto revealed. - pub auto_reveal_entries: bool, + /// + /// Default: true + pub auto_reveal_entries: Option, /// Whether to fold directories automatically /// when directory has only one directory inside. - pub auto_fold_dirs: bool, -} - -impl Default for OutlinePanelSettings { - fn default() -> Self { - Self { - button: true, - default_width: px(240.), - dock: OutlinePanelDockPosition::Left, - file_icons: true, - folder_icons: true, - auto_fold_dirs: true, - auto_reveal_entries: true, - indent_size: px(20.), - git_status: true, - } - } + /// + /// Default: true + pub auto_fold_dirs: Option, } impl Settings for OutlinePanelSettings { const KEY: Option<&'static str> = Some("outline_panel"); - type FileContent = Self; + type FileContent = OutlinePanelSettingsContent; fn load( sources: SettingsSources, diff --git a/crates/performance/src/performance.rs b/crates/performance/src/performance.rs deleted file mode 100644 index db2388c59ae1c0..00000000000000 --- a/crates/performance/src/performance.rs +++ /dev/null @@ -1,184 +0,0 @@ -use std::time::Instant; - -use anyhow::Result; -use gpui::{ - div, AppContext, InteractiveElement as _, Render, StatefulInteractiveElement as _, - Subscription, ViewContext, VisualContext, -}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsStore}; -use workspace::{ - ui::{Label, LabelCommon, LabelSize, 
Tooltip}, - ItemHandle, StatusItemView, Workspace, -}; - -const SHOW_STARTUP_TIME_DURATION: std::time::Duration = std::time::Duration::from_secs(5); - -pub fn init(cx: &mut AppContext) { - PerformanceSettings::register(cx); - - let mut enabled = PerformanceSettings::get_global(cx).show_in_status_bar; - let start_time = Instant::now(); - let mut _observe_workspaces = toggle_status_bar_items(enabled, start_time, cx); - - cx.observe_global::(move |cx| { - let new_value = PerformanceSettings::get_global(cx).show_in_status_bar; - if new_value != enabled { - enabled = new_value; - _observe_workspaces = toggle_status_bar_items(enabled, start_time, cx); - } - }) - .detach(); -} - -fn toggle_status_bar_items( - enabled: bool, - start_time: Instant, - cx: &mut AppContext, -) -> Option { - for window in cx.windows() { - if let Some(workspace) = window.downcast::() { - workspace - .update(cx, |workspace, cx| { - toggle_status_bar_item(workspace, enabled, start_time, cx); - }) - .ok(); - } - } - - if enabled { - log::info!("performance metrics display enabled"); - Some(cx.observe_new_views::(move |workspace, cx| { - toggle_status_bar_item(workspace, true, start_time, cx); - })) - } else { - log::info!("performance metrics display disabled"); - None - } -} - -struct PerformanceStatusBarItem { - display_mode: DisplayMode, -} - -#[derive(Copy, Clone, Debug)] -enum DisplayMode { - StartupTime, - Fps, -} - -impl PerformanceStatusBarItem { - fn new(start_time: Instant, cx: &mut ViewContext) -> Self { - let now = Instant::now(); - let display_mode = if now < start_time + SHOW_STARTUP_TIME_DURATION { - DisplayMode::StartupTime - } else { - DisplayMode::Fps - }; - - let this = Self { display_mode }; - - if let DisplayMode::StartupTime = display_mode { - cx.spawn(|this, mut cx| async move { - let now = Instant::now(); - let remaining_duration = - (start_time + SHOW_STARTUP_TIME_DURATION).saturating_duration_since(now); - cx.background_executor().timer(remaining_duration).await; - 
this.update(&mut cx, |this, cx| { - this.display_mode = DisplayMode::Fps; - cx.notify(); - }) - .ok(); - }) - .detach(); - } - - this - } -} - -impl Render for PerformanceStatusBarItem { - fn render(&mut self, cx: &mut gpui::ViewContext) -> impl gpui::IntoElement { - let text = match self.display_mode { - DisplayMode::StartupTime => cx - .time_to_first_window_draw() - .map_or("Pending".to_string(), |duration| { - format!("{}ms", duration.as_millis()) - }), - DisplayMode::Fps => cx.fps().map_or("".to_string(), |fps| { - format!("{:3} FPS", fps.round() as u32) - }), - }; - - use gpui::ParentElement; - let display_mode = self.display_mode; - div() - .id("performance status") - .child(Label::new(text).size(LabelSize::Small)) - .tooltip(move |cx| match display_mode { - DisplayMode::StartupTime => Tooltip::text("Time to first window draw", cx), - DisplayMode::Fps => cx - .new_view(|cx| { - let tooltip = Tooltip::new("Current FPS"); - if let Some(time_to_first) = cx.time_to_first_window_draw() { - tooltip.meta(format!( - "Time to first window draw: {}ms", - time_to_first.as_millis() - )) - } else { - tooltip - } - }) - .into(), - }) - } -} - -impl StatusItemView for PerformanceStatusBarItem { - fn set_active_pane_item( - &mut self, - _active_pane_item: Option<&dyn ItemHandle>, - _cx: &mut gpui::ViewContext, - ) { - // This is not currently used. - } -} - -fn toggle_status_bar_item( - workspace: &mut Workspace, - enabled: bool, - start_time: Instant, - cx: &mut ViewContext, -) { - if enabled { - workspace.status_bar().update(cx, |bar, cx| { - bar.add_right_item( - cx.new_view(|cx| PerformanceStatusBarItem::new(start_time, cx)), - cx, - ) - }); - } else { - workspace.status_bar().update(cx, |bar, cx| { - bar.remove_items_of_type::(cx); - }); - } -} - -/// Configuration of the display of performance details. 
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] -#[serde(default)] -pub struct PerformanceSettings { - /// Display the time to first window draw and frame rate in the status bar. - pub show_in_status_bar: bool, -} - -impl Settings for PerformanceSettings { - const KEY: Option<&'static str> = Some("performance"); - - type FileContent = Self; - - fn load(sources: SettingsSources, _: &mut AppContext) -> Result { - sources.json_merge() - } -} diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 3c21b1c5e8caf1..70b2eccf237c62 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -20,7 +20,6 @@ use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId}; use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent}; #[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] -#[serde(default)] pub struct ProjectSettings { /// Configuration for language servers. /// @@ -42,6 +41,7 @@ pub struct ProjectSettings { pub load_direnv: DirenvSettings, /// Configuration for session-related features + #[serde(default)] pub session: SessionSettings, } @@ -59,31 +59,36 @@ pub enum DirenvSettings { } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] -#[serde(default)] pub struct GitSettings { /// Whether or not to show the git gutter. /// /// Default: tracked_files - pub git_gutter: GitGutterSetting, + pub git_gutter: Option, pub gutter_debounce: Option, /// Whether or not to show git blame data inline in /// the currently focused line. /// /// Default: on - pub inline_blame: InlineBlameSettings, + pub inline_blame: Option, } impl GitSettings { pub fn inline_blame_enabled(&self) -> bool { #[allow(unknown_lints, clippy::manual_unwrap_or_default)] - self.inline_blame.enabled + match self.inline_blame { + Some(InlineBlameSettings { enabled, .. 
}) => enabled, + _ => false, + } } pub fn inline_blame_delay(&self) -> Option { - self.inline_blame - .delay_ms - .gt(&0) - .then(|| Duration::from_millis(self.inline_blame.delay_ms)) + match self.inline_blame { + Some(InlineBlameSettings { + delay_ms: Some(delay_ms), + .. + }) if delay_ms > 0 => Some(Duration::from_millis(delay_ms)), + _ => None, + } } } @@ -97,34 +102,28 @@ pub enum GitGutterSetting { Hide, } -#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] -#[serde(default)] pub struct InlineBlameSettings { /// Whether or not to show git blame data inline in /// the currently focused line. /// /// Default: true + #[serde(default = "true_value")] pub enabled: bool, /// Whether to only show the inline blame information /// after a delay once the cursor stops moving. /// /// Default: 0 - pub delay_ms: u64, + pub delay_ms: Option, /// The minimum column number to show the inline blame information at /// /// Default: 0 - pub min_column: u32, + pub min_column: Option, } -impl Default for InlineBlameSettings { - fn default() -> Self { - Self { - enabled: true, - delay_ms: 0, - min_column: 0, - } - } +const fn true_value() -> bool { + true } #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 6ca843875b48a5..c77a2170dd01cd 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2289,7 +2289,7 @@ impl ProjectPanel { .child( ListItem::new(entry_id.to_proto() as usize) .indent_level(depth) - .indent_step_size(settings.indent_size) + .indent_step_size(px(settings.indent_size)) .selected(is_marked || is_active) .when_some(canonical_path, |this, path| { this.end_slot::( @@ -2817,7 +2817,7 @@ impl Render for DraggedProjectEntryView { 
this.bg(cx.theme().colors().background).w(self.width).child( ListItem::new(self.selection.entry_id.to_proto() as usize) .indent_level(self.details.depth) - .indent_step_size(settings.indent_size) + .indent_step_size(px(settings.indent_size)) .child(if let Some(icon) = &self.details.icon { div().child(Icon::from_path(icon.clone())) } else { @@ -2855,7 +2855,7 @@ impl Panel for ProjectPanel { DockPosition::Left | DockPosition::Bottom => ProjectPanelDockPosition::Left, DockPosition::Right => ProjectPanelDockPosition::Right, }; - settings.dock = dock; + settings.dock = Some(dock); }, ); } @@ -3029,7 +3029,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { worktree_settings.file_scan_exclusions = - vec!["**/.git".to_string(), "**/4/**".to_string()]; + Some(vec!["**/.git".to_string(), "**/4/**".to_string()]); }); }); }); @@ -4818,10 +4818,10 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Vec::new(); + worktree_settings.file_scan_exclusions = Some(Vec::new()); }); store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = false + project_panel_settings.auto_reveal_entries = Some(false) }); }) }); @@ -4940,7 +4940,7 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = true + project_panel_settings.auto_reveal_entries = Some(true) }); }) }); @@ -5054,10 +5054,10 @@ mod tests { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Vec::new(); + worktree_settings.file_scan_exclusions = Some(Vec::new()); }); store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_reveal_entries = false + 
project_panel_settings.auto_reveal_entries = Some(false) }); }) }); @@ -5256,7 +5256,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - vec!["excluded_dir".to_string(), "**/.git".to_string()]; + Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]); }); }); }); @@ -5569,10 +5569,10 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_fold_dirs = false; + project_panel_settings.auto_fold_dirs = Some(false); }); store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Vec::new(); + worktree_settings.file_scan_exclusions = Some(Vec::new()); }); }); }); @@ -5591,10 +5591,10 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_panel_settings| { - project_panel_settings.auto_fold_dirs = false; + project_panel_settings.auto_fold_dirs = Some(false); }); store.update_user_settings::(cx, |worktree_settings| { - worktree_settings.file_scan_exclusions = Vec::new(); + worktree_settings.file_scan_exclusions = Some(Vec::new()); }); }); }); diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 6910b4627a00a0..4d73ae92456da9 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -2,7 +2,6 @@ use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -use ui::px; #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)] #[serde(rename_all = "snake_case")] @@ -11,50 +10,20 @@ pub enum ProjectPanelDockPosition { Right, } -#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, JsonSchema)] -#[serde(default)] +#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] pub struct 
ProjectPanelSettings { - /// Whether to show the project panel button in the status bar. pub button: bool, - /// Customize default width (in pixels) taken by project panel pub default_width: Pixels, - /// The position of project panel pub dock: ProjectPanelDockPosition, - /// Whether to show file icons in the project panel. pub file_icons: bool, - /// Whether to show folder icons or chevrons for directories in the project panel. pub folder_icons: bool, - /// Whether to show the git status in the project panel. pub git_status: bool, - /// Amount of indentation (in pixels) for nested items. - pub indent_size: Pixels, - /// Whether to reveal it in the project panel automatically, - /// when a corresponding project entry becomes active. - /// Gitignored entries are never auto revealed. + pub indent_size: f32, pub auto_reveal_entries: bool, - /// Whether to fold directories automatically - /// when directory has only one directory inside. pub auto_fold_dirs: bool, - /// Scrollbar-related settings pub scrollbar: ScrollbarSettings, } -impl Default for ProjectPanelSettings { - fn default() -> Self { - Self { - button: true, - default_width: px(240.), - dock: ProjectPanelDockPosition::Left, - file_icons: true, - folder_icons: true, - git_status: true, - indent_size: px(20.), - auto_reveal_entries: true, - auto_fold_dirs: true, - scrollbar: Default::default(), - } - } -} /// When to show the scrollbar in the project panel. /// /// Default: always @@ -68,7 +37,7 @@ pub enum ShowScrollbar { Never, } -#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettings { /// When to show the scrollbar in the project panel. 
/// @@ -76,10 +45,63 @@ pub struct ScrollbarSettings { pub show: ShowScrollbar, } +#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct ScrollbarSettingsContent { + /// When to show the scrollbar in the project panel. + /// + /// Default: always + pub show: Option, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct ProjectPanelSettingsContent { + /// Whether to show the project panel button in the status bar. + /// + /// Default: true + pub button: Option, + /// Customize default width (in pixels) taken by project panel + /// + /// Default: 240 + pub default_width: Option, + /// The position of project panel + /// + /// Default: left + pub dock: Option, + /// Whether to show file icons in the project panel. + /// + /// Default: true + pub file_icons: Option, + /// Whether to show folder icons or chevrons for directories in the project panel. + /// + /// Default: true + pub folder_icons: Option, + /// Whether to show the git status in the project panel. + /// + /// Default: true + pub git_status: Option, + /// Amount of indentation (in pixels) for nested items. + /// + /// Default: 20 + pub indent_size: Option, + /// Whether to reveal it in the project panel automatically, + /// when a corresponding project entry becomes active. + /// Gitignored entries are never auto revealed. + /// + /// Default: true + pub auto_reveal_entries: Option, + /// Whether to fold directories automatically + /// when directory has only one directory inside. 
+ /// + /// Default: false + pub auto_fold_dirs: Option, + /// Scrollbar-related settings + pub scrollbar: Option, +} + impl Settings for ProjectPanelSettings { const KEY: Option<&'static str> = Some("project_panel"); - type FileContent = Self; + type FileContent = ProjectPanelSettingsContent; fn load( sources: SettingsSources, diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index b7fa6359459290..d8b10f31f9f55e 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -48,6 +48,7 @@ use workspace::{notifications::DetachAndPromptErr, AppState, ModalView, Workspac use crate::open_dev_server_project; use crate::ssh_connections::connect_over_ssh; use crate::ssh_connections::open_ssh_project; +use crate::ssh_connections::RemoteSettingsContent; use crate::ssh_connections::SshConnection; use crate::ssh_connections::SshConnectionModal; use crate::ssh_connections::SshProject; @@ -1023,7 +1024,7 @@ impl DevServerProjects { fn update_settings_file( &mut self, cx: &mut ViewContext, - f: impl FnOnce(&mut SshSettings) + Send + Sync + 'static, + f: impl FnOnce(&mut RemoteSettingsContent) + Send + Sync + 'static, ) { let Some(fs) = self .workspace diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index b54196022dbcde..8da4284b7f56a7 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -22,24 +22,8 @@ use ui::{ use util::paths::PathWithPosition; use workspace::{AppState, ModalView, Workspace}; -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] pub struct SshSettings { - /// ssh_connections is an array of ssh connections. - /// By default this setting is null, which disables the direct ssh connection support. - /// You can configure these from `project: Open Remote` in the command palette. 
- /// Zed's ssh support will pull configuration from your ~/.ssh too. - /// Examples: - /// [ - /// { - /// "host": "example-box", - /// "projects": [ - /// { - /// "paths": ["/home/user/code/zed"] - /// } - /// ] - /// } - /// ] pub ssh_connections: Option>, } @@ -78,10 +62,15 @@ pub struct SshProject { pub paths: Vec, } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct RemoteSettingsContent { + pub ssh_connections: Option>, +} + impl Settings for SshSettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = RemoteSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/repl/src/jupyter_settings.rs b/crates/repl/src/jupyter_settings.rs index f441da4790b804..aefef6cec5b44d 100644 --- a/crates/repl/src/jupyter_settings.rs +++ b/crates/repl/src/jupyter_settings.rs @@ -6,10 +6,8 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Debug, Default)] pub struct JupyterSettings { - /// Default kernels to select for each language. pub kernel_selections: HashMap, } @@ -22,10 +20,26 @@ impl JupyterSettings { } } +#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] +pub struct JupyterSettingsContent { + /// Default kernels to select for each language. 
+ /// + /// Default: `{}` + pub kernel_selections: Option>, +} + +impl Default for JupyterSettingsContent { + fn default() -> Self { + JupyterSettingsContent { + kernel_selections: Some(HashMap::new()), + } + } +} + impl Settings for JupyterSettings { const KEY: Option<&'static str> = Some("jupyter"); - type FileContent = Self; + type FileContent = JupyterSettingsContent; fn load( sources: SettingsSources, @@ -37,8 +51,10 @@ impl Settings for JupyterSettings { let mut settings = JupyterSettings::default(); for value in sources.defaults_and_customizations() { - for (k, v) in &value.kernel_selections { - settings.kernel_selections.insert(k.clone(), v.clone()); + if let Some(source) = &value.kernel_selections { + for (k, v) in source { + settings.kernel_selections.insert(k.clone(), v.clone()); + } } } diff --git a/crates/tasks_ui/src/settings.rs b/crates/tasks_ui/src/settings.rs index 4ad6f607b76c3a..1bcd4962644232 100644 --- a/crates/tasks_ui/src/settings.rs +++ b/crates/tasks_ui/src/settings.rs @@ -2,26 +2,22 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Serialize, Deserialize, PartialEq, JsonSchema)] -#[serde(default)] -/// Task-related settings. +#[derive(Serialize, Deserialize, PartialEq, Default)] pub(crate) struct TaskSettings { - /// Whether to show task status indicator in the status bar. Default: true pub(crate) show_status_indicator: bool, } -impl Default for TaskSettings { - fn default() -> Self { - Self { - show_status_indicator: true, - } - } +/// Task-related settings. +#[derive(Serialize, Deserialize, PartialEq, Default, Clone, JsonSchema)] +pub(crate) struct TaskSettingsContent { + /// Whether to show task status indicator in the status bar. 
Default: true + show_status_indicator: Option, } impl Settings for TaskSettings { const KEY: Option<&'static str> = Some("task"); - type FileContent = Self; + type FileContent = TaskSettingsContent; fn load( sources: SettingsSources, diff --git a/crates/vim/src/digraph.rs b/crates/vim/src/digraph.rs index 282016cfdad277..443b7ff37801eb 100644 --- a/crates/vim/src/digraph.rs +++ b/crates/vim/src/digraph.rs @@ -132,7 +132,7 @@ mod test { let mut custom_digraphs = HashMap::default(); custom_digraphs.insert("|-".into(), "⊢".into()); custom_digraphs.insert(":)".into(), "👨‍💻".into()); - s.custom_digraphs = custom_digraphs; + s.custom_digraphs = Some(custom_digraphs); }); }); diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 815086d0be0470..8198c0da53b4f0 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -1184,7 +1184,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_multiline_find = true; + s.use_multiline_find = Some(true); }); }); @@ -1226,7 +1226,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_multiline_find = true; + s.use_multiline_find = Some(true); }); }); @@ -1268,7 +1268,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_smartcase_find = true; + s.use_smartcase_find = Some(true); }); }); diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index 6465e33e0f3b53..05469dbf9f168f 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -291,7 +291,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = 
Some(UseSystemClipboard::Never) }); }); @@ -327,7 +327,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::OnYank + s.use_system_clipboard = Some(UseSystemClipboard::OnYank) }); }); @@ -584,7 +584,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = Some(UseSystemClipboard::Never) }); }); @@ -630,7 +630,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = Some(UseSystemClipboard::Never) }); }); @@ -659,7 +659,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = Some(UseSystemClipboard::Never) }); }); @@ -707,7 +707,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.use_system_clipboard = UseSystemClipboard::Never + s.use_system_clipboard = Some(UseSystemClipboard::Never) }); }); diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index 6a20ea4eb33bb1..f89faa3748372f 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -294,7 +294,7 @@ mod test { cx.update_global(|store: &mut SettingsStore, cx| { store.update_user_settings::(cx, |s| { - s.scroll_beyond_last_line = ScrollBeyondLastLine::Off + s.scroll_beyond_last_line = Some(ScrollBeyondLastLine::Off) }); }); diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 6418475ad2f576..28f33d49d85f20 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -542,7 +542,7 @@ mod test { let mut cx = VimTestContext::new(cx, true).await; cx.update_global(|store: 
&mut SettingsStore, cx| { - store.update_user_settings::(cx, |s| s.search_wrap = false); + store.update_user_settings::(cx, |s| s.search_wrap = Some(false)); }); cx.set_state("ˇhi\nhigh\nhi\n", Mode::Normal); @@ -655,7 +655,7 @@ mod test { // check that searching with unable search wrap cx.update_global(|store: &mut SettingsStore, cx| { - store.update_user_settings::(cx, |s| s.search_wrap = false); + store.update_user_settings::(cx, |s| s.search_wrap = Some(false)); }); cx.set_state("aa\nbˇb\ncc\ncc\ncc\n", Mode::Normal); cx.simulate_keystrokes("/ c c enter"); diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index be7db47315f22d..9c61e9cd938292 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -1300,7 +1300,7 @@ async fn test_command_alias(cx: &mut gpui::TestAppContext) { store.update_user_settings::(cx, |s| { let mut aliases = HashMap::default(); aliases.insert("Q".to_string(), "upper".to_string()); - s.command_aliases = aliases + s.command_aliases = Some(aliases) }); }); diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index b68d2ede8b09b7..c985f68e701eb1 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -57,7 +57,7 @@ impl VimTestContext { pub fn new_with_lsp(mut cx: EditorLspTestContext, enabled: bool) -> VimTestContext { cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = VimModeSetting(enabled)); + store.update_user_settings::(cx, |s| *s = Some(enabled)); }); settings::KeymapFile::load_asset("keymaps/default-macos.json", cx).unwrap(); if enabled { @@ -105,7 +105,7 @@ impl VimTestContext { pub fn enable_vim(&mut self) { self.cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = VimModeSetting(true)); + store.update_user_settings::(cx, |s| *s = Some(true)); }); }) } @@ -113,7 +113,7 @@ impl VimTestContext { pub fn 
disable_vim(&mut self) { self.cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = VimModeSetting(false)); + store.update_user_settings::(cx, |s| *s = Some(false)); }); }) } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 6baca17948e8ef..6e03374c22595e 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -46,8 +46,6 @@ use crate::state::ReplayableAction; /// Whether or not to enable Vim mode. /// /// Default: false -#[derive(Copy, Clone, Default, Deserialize, Serialize, JsonSchema)] -#[serde(default, transparent)] pub struct VimModeSetting(pub bool); /// An Action to Switch between modes @@ -101,7 +99,7 @@ pub fn init(cx: &mut AppContext) { let fs = workspace.app_state().fs.clone(); let currently_enabled = Vim::enabled(cx); update_settings_file::(fs, cx, move |setting, _| { - *setting = VimModeSetting(!currently_enabled); + *setting = Some(!currently_enabled) }) }); @@ -1070,10 +1068,12 @@ impl Vim { impl Settings for VimModeSetting { const KEY: Option<&'static str> = Some("vim_mode"); - type FileContent = Self; + type FileContent = Option; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { - Ok(sources.user.copied().unwrap_or(*sources.default)) + Ok(Self(sources.user.copied().flatten().unwrap_or( + sources.default.ok_or_else(Self::missing_default)?, + ))) } } @@ -1089,8 +1089,7 @@ pub enum UseSystemClipboard { OnYank, } -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] struct VimSettings { pub toggle_relative_line_numbers: bool, pub use_system_clipboard: UseSystemClipboard, @@ -1099,22 +1098,19 @@ struct VimSettings { pub custom_digraphs: HashMap>, } -impl Default for VimSettings { - fn default() -> Self { - Self { - toggle_relative_line_numbers: false, - use_system_clipboard: UseSystemClipboard::Always, - use_multiline_find: false, - use_smartcase_find: false, - custom_digraphs: Default::default(), - } - } 
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +struct VimSettingsContent { + pub toggle_relative_line_numbers: Option, + pub use_system_clipboard: Option, + pub use_multiline_find: Option, + pub use_smartcase_find: Option, + pub custom_digraphs: Option>>, } impl Settings for VimSettings { const KEY: Option<&'static str> = Some("vim"); - type FileContent = Self; + type FileContent = VimSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/welcome/src/base_keymap_picker.rs b/crates/welcome/src/base_keymap_picker.rs index fd7361f9b3d002..96a9df9c3c8cca 100644 --- a/crates/welcome/src/base_keymap_picker.rs +++ b/crates/welcome/src/base_keymap_picker.rs @@ -177,7 +177,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate { .report_setting_event("keymap", base_keymap.to_string()); update_settings_file::(self.fs.clone(), cx, move |setting, _| { - *setting = base_keymap; + *setting = Some(base_keymap) }); } diff --git a/crates/welcome/src/base_keymap_setting.rs b/crates/welcome/src/base_keymap_setting.rs index 0c1724627cf6d9..1b52bbc9f94fbd 100644 --- a/crates/welcome/src/base_keymap_setting.rs +++ b/crates/welcome/src/base_keymap_setting.rs @@ -87,15 +87,15 @@ impl BaseKeymap { impl Settings for BaseKeymap { const KEY: Option<&'static str> = Some("base_keymap"); - type FileContent = Self; + type FileContent = Option; fn load( sources: SettingsSources, _: &mut gpui::AppContext, ) -> anyhow::Result { - if let Some(user_value) = sources.user.copied() { + if let Some(Some(user_value)) = sources.user.copied() { return Ok(user_value); } - Ok(*sources.default) + sources.default.ok_or_else(Self::missing_default) } } diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index 787c2e589bb0fe..fc837c68671a31 100644 --- a/crates/welcome/src/welcome.rs +++ b/crates/welcome/src/welcome.rs @@ -188,7 +188,7 @@ impl Render for WelcomePage { this.update_settings::( selection, cx, - 
|setting, value| *setting = VimModeSetting(value), + |setting, value| *setting = Some(value), ); }), )) diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 46b8f3bf7fa90b..935f0268b62ffe 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -36,49 +36,20 @@ use util::ResultExt; pub const LEADER_UPDATE_THROTTLE: Duration = Duration::from_millis(200); -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] pub struct ItemSettings { - /// Whether to show the Git file status on a tab item. pub git_status: bool, - /// Position of the close button in a tab. pub close_position: ClosePosition, - /// Whether to show the file icon for a tab. pub file_icons: bool, } -impl Default for ItemSettings { - fn default() -> Self { - Self { - git_status: false, - close_position: ClosePosition::Right, - file_icons: false, - } - } -} - -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] pub struct PreviewTabsSettings { - /// Whether to show opened editors as preview tabs. - /// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic. pub enabled: bool, - /// Whether to open tabs in preview mode when selected from the file finder. pub enable_preview_from_file_finder: bool, - /// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. 
pub enable_preview_from_code_navigation: bool, } -impl Default for PreviewTabsSettings { - fn default() -> Self { - Self { - enabled: true, - enable_preview_from_file_finder: false, - enable_preview_from_code_navigation: false, - } - } -} - #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "lowercase")] pub enum ClosePosition { @@ -96,10 +67,43 @@ impl ClosePosition { } } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct ItemSettingsContent { + /// Whether to show the Git file status on a tab item. + /// + /// Default: false + git_status: Option, + /// Position of the close button in a tab. + /// + /// Default: right + close_position: Option, + /// Whether to show the file icon for a tab. + /// + /// Default: false + file_icons: Option, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct PreviewTabsSettingsContent { + /// Whether to show opened editors as preview tabs. + /// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic. + /// + /// Default: true + enabled: Option, + /// Whether to open tabs in preview mode when selected from the file finder. + /// + /// Default: false + enable_preview_from_file_finder: Option, + /// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. 
+ /// + /// Default: false + enable_preview_from_code_navigation: Option, +} + impl Settings for ItemSettings { const KEY: Option<&'static str> = Some("tabs"); - type FileContent = Self; + type FileContent = ItemSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() @@ -109,7 +113,7 @@ impl Settings for ItemSettings { impl Settings for PreviewTabsSettings { const KEY: Option<&'static str> = Some("preview_tabs"); - type FileContent = Self; + type FileContent = PreviewTabsSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 0d774277944c02..a7c63c57f698d4 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -6465,7 +6465,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = AutosaveSetting::OnWindowChange; + settings.autosave = Some(AutosaveSetting::OnWindowChange); }) }); item.is_dirty = true; @@ -6485,7 +6485,7 @@ mod tests { cx.focus_self(); SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = AutosaveSetting::OnFocusChange; + settings.autosave = Some(AutosaveSetting::OnFocusChange); }) }); item.is_dirty = true; @@ -6508,7 +6508,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = AutosaveSetting::AfterDelay { milliseconds: 500 }; + settings.autosave = Some(AutosaveSetting::AfterDelay { milliseconds: 500 }); }) }); item.is_dirty = true; @@ -6527,7 +6527,7 @@ mod tests { item.update(cx, |item, cx| { SettingsStore::update_global(cx, |settings, cx| { settings.update_user_settings::(cx, |settings| { - settings.autosave = AutosaveSetting::OnFocusChange; + 
settings.autosave = Some(AutosaveSetting::OnFocusChange); }) }); item.is_dirty = true; diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index f87840eb308388..52827c6941ae7d 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -5,58 +5,22 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Deserialize)] pub struct WorkspaceSettings { - /// Scale by which to zoom the active pane. - /// When set to 1.0, the active pane has the same size as others, - /// but when set to a larger value, the active pane takes up more space. pub active_pane_magnification: f32, - /// Direction to split horizontally. pub pane_split_direction_horizontal: PaneSplitDirectionHorizontal, - /// Direction to split vertically. pub pane_split_direction_vertical: PaneSplitDirectionVertical, - /// Centered layout related settings. pub centered_layout: CenteredLayoutSettings, - /// Whether or not to prompt the user to confirm before closing the application. pub confirm_quit: bool, - /// Whether or not to show the call status icon in the status bar. pub show_call_status_icon: bool, - /// When to automatically save edited buffers. pub autosave: AutosaveSetting, - /// Controls previous session restoration in freshly launched Zed instance. pub restore_on_startup: RestoreOnStartupBehavior, - /// The size of the workspace split drop targets on the outer edges. - /// Given as a fraction that will be multiplied by the smaller dimension of the workspace. pub drop_target_size: f32, - /// Whether to close the window when using 'close active item' on a workspace with no tabs pub when_closing_with_no_tabs: CloseWindowWhenNoItems, - /// Whether to use the system provided dialogs for Open and Save As. - /// When set to false, Zed will use the built-in keyboard-first pickers. 
pub use_system_path_prompts: bool, - /// Aliases for the command palette. When you type a key in this map, - /// it will be assumed to equal the value. pub command_aliases: HashMap, } -impl Default for WorkspaceSettings { - fn default() -> Self { - Self { - active_pane_magnification: 1.0, - pane_split_direction_horizontal: PaneSplitDirectionHorizontal::Up, - pane_split_direction_vertical: PaneSplitDirectionVertical::Left, - centered_layout: CenteredLayoutSettings::default(), - confirm_quit: false, - show_call_status_icon: true, - autosave: AutosaveSetting::Off, - restore_on_startup: RestoreOnStartupBehavior::default(), - drop_target_size: 0.2, - when_closing_with_no_tabs: CloseWindowWhenNoItems::default(), - use_system_path_prompts: true, - command_aliases: HashMap::default(), - } - } -} #[derive(Copy, Clone, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CloseWindowWhenNoItems { @@ -91,22 +55,77 @@ pub enum RestoreOnStartupBehavior { LastSession, } -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct WorkspaceSettingsContent { + /// Scale by which to zoom the active pane. + /// When set to 1.0, the active pane has the same size as others, + /// but when set to a larger value, the active pane takes up more space. + /// + /// Default: `1.0` + pub active_pane_magnification: Option, + // Direction to split horizontally. + // + // Default: "up" + pub pane_split_direction_horizontal: Option, + // Direction to split vertically. + // + // Default: "left" + pub pane_split_direction_vertical: Option, + // Centered layout related settings. + pub centered_layout: Option, + /// Whether or not to prompt the user to confirm before closing the application. + /// + /// Default: false + pub confirm_quit: Option, + /// Whether or not to show the call status icon in the status bar. 
+ /// + /// Default: true + pub show_call_status_icon: Option, + /// When to automatically save edited buffers. + /// + /// Default: off + pub autosave: Option, + /// Controls previous session restoration in freshly launched Zed instance. + /// Values: none, last_workspace, last_session + /// Default: last_session + pub restore_on_startup: Option, + /// The size of the workspace split drop targets on the outer edges. + /// Given as a fraction that will be multiplied by the smaller dimension of the workspace. + /// + /// Default: `0.2` (20% of the smaller dimension of the workspace) + pub drop_target_size: Option, + /// Whether to close the window when using 'close active item' on a workspace with no tabs + /// + /// Default: auto ("on" on macOS, "off" otherwise) + pub when_closing_with_no_tabs: Option, + /// Whether to use the system provided dialogs for Open and Save As. + /// When set to false, Zed will use the built-in keyboard-first pickers. + /// + /// Default: true + pub use_system_path_prompts: Option, + /// Aliases for the command palette. When you type a key in this map, + /// it will be assumed to equal the value. + /// + /// Default: true + pub command_aliases: Option>, +} + +#[derive(Deserialize)] pub struct TabBarSettings { - /// Whether or not to show the tab bar in the editor. pub show: bool, - /// Whether or not to show the navigation history buttons in the tab bar. pub show_nav_history_buttons: bool, } -impl Default for TabBarSettings { - fn default() -> Self { - Self { - show_nav_history_buttons: true, - show: true, - } - } +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] +pub struct TabBarSettingsContent { + /// Whether or not to show the tab bar in the editor. + /// + /// Default: true + pub show: Option, + /// Whether or not to show the navigation history buttons in the tab bar. 
+ /// + /// Default: true + pub show_nav_history_buttons: Option, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] @@ -144,26 +163,17 @@ pub struct CenteredLayoutSettings { /// /// Default: 0.2 pub left_padding: Option, - /// The relative width of the right padding of the central pane from the - /// workspace when the centered layout is used. + // The relative width of the right padding of the central pane from the + // workspace when the centered layout is used. /// /// Default: 0.2 pub right_padding: Option, } -impl Default for CenteredLayoutSettings { - fn default() -> Self { - Self { - left_padding: Some(0.2), - right_padding: Some(0.2), - } - } -} - impl Settings for WorkspaceSettings { const KEY: Option<&'static str> = None; - type FileContent = Self; + type FileContent = WorkspaceSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() @@ -173,7 +183,7 @@ impl Settings for WorkspaceSettings { impl Settings for TabBarSettings { const KEY: Option<&'static str> = Some("tab_bar"); - type FileContent = Self; + type FileContent = TabBarSettingsContent; fn load(sources: SettingsSources, _: &mut AppContext) -> Result { sources.json_merge() diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index 82be3a80287928..32851d963a1321 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -25,8 +25,7 @@ impl WorktreeSettings { } } -#[derive(Clone, Serialize, Deserialize, JsonSchema)] -#[serde(default)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] pub struct WorktreeSettingsContent { /// Completely ignore files matching globs from `file_scan_exclusions` /// @@ -40,42 +39,12 @@ pub struct WorktreeSettingsContent { /// "**/.classpath", /// "**/.settings" /// ] - pub file_scan_exclusions: Vec, + #[serde(default)] + pub file_scan_exclusions: Option>, /// Treat the files matching these 
globs as `.env` files. /// Default: [ "**/.env*" ] - pub private_files: Vec, -} - -impl Default for WorktreeSettingsContent { - fn default() -> Self { - Self { - private_files: [ - "**/.env*", - "**/*.pem", - "**/*.key", - "**/*.cert", - "**/*.crt", - "**/secrets.yml", - ] - .into_iter() - .map(str::to_owned) - .collect(), - file_scan_exclusions: [ - "**/.git", - "**/.svn", - "**/.hg", - "**/CVS", - "**/.DS_Store", - "**/Thumbs.db", - "**/.classpath", - "**/.settings", - ] - .into_iter() - .map(str::to_owned) - .collect(), - } - } + pub private_files: Option>, } impl Settings for WorktreeSettings { @@ -88,8 +57,8 @@ impl Settings for WorktreeSettings { _: &mut AppContext, ) -> anyhow::Result { let result: WorktreeSettingsContent = sources.json_merge()?; - let mut file_scan_exclusions = result.file_scan_exclusions; - let mut private_files = result.private_files; + let mut file_scan_exclusions = result.file_scan_exclusions.unwrap_or_default(); + let mut private_files = result.private_files.unwrap_or_default(); file_scan_exclusions.sort(); private_files.sort(); Ok(Self { diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 455bc62a79a874..929dc01c6d17e9 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -673,7 +673,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = Vec::new(); + project_settings.file_scan_exclusions = Some(Vec::new()); }); }); }); @@ -910,7 +910,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; + Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]); }); }); }); @@ -945,7 +945,8 @@ async fn 
test_file_scan_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = vec!["**/node_modules/**".to_string()]; + project_settings.file_scan_exclusions = + Some(vec!["**/node_modules/**".to_string()]); }); }); }); @@ -1008,11 +1009,11 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = vec![ + project_settings.file_scan_exclusions = Some(vec![ "**/.git".to_string(), "node_modules/".to_string(), "build_output".to_string(), - ]; + ]); }); }); }); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 9f670efcd7c5fe..93fee57ecdb654 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -1996,7 +1996,7 @@ mod tests { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = - vec!["excluded_dir".to_string(), "**/.git".to_string()]; + Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]); }); }); }); From b9b62842f8e9ccd70dba5fd3fa2f3e4b0f2aaab3 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 12 Sep 2024 15:23:27 -0400 Subject: [PATCH 059/762] lsp: Treat unrooted paths as relative to the worktree root (#17769) gopls would send us watch patterns like `**/*.mod` and we'd fall back to watching `/`. Release Notes: - Fix file watching for go projects resorting to watching the fs root. 
Co-authored-by: Thorsten --- crates/project/src/lsp_store.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 307e86de450b7f..cdf1fa4be492dc 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -3499,7 +3499,7 @@ impl LspStore { .to_owned(); let path = if Path::new(path).components().next().is_none() { - Arc::from(Path::new("/")) + Arc::from(Path::new(worktree_root_path)) } else { PathBuf::from(path).into() }; From ee96d69e37790a37603ec0d558f0d3f08b9a6661 Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Fri, 13 Sep 2024 03:55:03 +0800 Subject: [PATCH 060/762] gpui: Fix CJK line wrap for GPUI text render (#17737) Release Notes: - N/A This changes is going to let GPUI render correct text wrapping for CJK characters. We was done this in PR #11296 for Editor, but this is also need support for other text renders. | Before | After | | --- | --- | | SCR-20240912-jtvo | image | --- crates/gpui/examples/text_wrapper.rs | 3 ++- crates/gpui/src/text_system/line_layout.rs | 17 ++++++++++++++--- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/crates/gpui/examples/text_wrapper.rs b/crates/gpui/examples/text_wrapper.rs index 063d60d1983d79..cb064259286ef8 100644 --- a/crates/gpui/examples/text_wrapper.rs +++ b/crates/gpui/examples/text_wrapper.rs @@ -4,7 +4,7 @@ struct HelloWorld {} impl Render for HelloWorld { fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { - let text = "The longest word in any of the major English language 以及中文的测试 dictionaries is pneumonoultramicroscopicsilicovolcanoconiosis, a word that refers to a lung disease contracted from the inhalation of very fine silica particles, specifically from a volcano; medically, it is the same as silicosis."; + let text = "The longest word 你好世界这段是中文,こんにちはこの段落は日本語です in any of the major English language dictionaries is pneumonoultramicroscopicsilicovolcanoconiosis, a word that 
refers to a lung disease contracted from the inhalation of very fine silica particles, specifically from a volcano; medically, it is the same as silicosis."; div() .id("page") .size_full() @@ -40,6 +40,7 @@ impl Render for HelloWorld { .border_1() .border_color(gpui::red()) .text_ellipsis() + .w_full() .child("A short text in normal div"), ), ) diff --git a/crates/gpui/src/text_system/line_layout.rs b/crates/gpui/src/text_system/line_layout.rs index af01eb70ede1c3..7e5a43dee881f4 100644 --- a/crates/gpui/src/text_system/line_layout.rs +++ b/crates/gpui/src/text_system/line_layout.rs @@ -9,6 +9,8 @@ use std::{ sync::Arc, }; +use super::LineWrapper; + /// A laid out and styled line of text #[derive(Default, Debug)] pub struct LineLayout { @@ -152,9 +154,18 @@ impl LineLayout { continue; } - if prev_ch == ' ' && ch != ' ' && first_non_whitespace_ix.is_some() { - last_candidate_ix = Some(boundary); - last_candidate_x = x; + // Here is very similar to `LineWrapper::wrap_line` to determine text wrapping, + // but there are some differences, so we have to duplicate the code here. 
+ if LineWrapper::is_word_char(ch) { + if prev_ch == ' ' && ch != ' ' && first_non_whitespace_ix.is_some() { + last_candidate_ix = Some(boundary); + last_candidate_x = x; + } + } else { + if ch != ' ' && first_non_whitespace_ix.is_some() { + last_candidate_ix = Some(boundary); + last_candidate_x = x; + } } if ch != ' ' && first_non_whitespace_ix.is_none() { From af819bf661242a3e6c6f54782f36b7ac68ab0294 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Fri, 13 Sep 2024 04:14:53 +0800 Subject: [PATCH 061/762] windows: Implement `fs::trash_file` and `fs::trash_dir` (#17711) https://github.com/user-attachments/assets/43370cee-26a5-4d27-b86f-656127e03b4a Release Notes: - N/A --- Cargo.toml | 1 + crates/fs/src/fs.rs | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/Cargo.toml b/Cargo.toml index 23b17fd2916957..79f5ce2dcf54d2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -483,6 +483,7 @@ version = "0.58" features = [ "implement", "Foundation_Numerics", + "Storage", "System", "System_Threading", "UI_ViewManagement", diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index a463773e7e527d..0ec5a4c601371b 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -342,6 +342,24 @@ impl Fs for RealFs { } } + #[cfg(target_os = "windows")] + async fn trash_file(&self, path: &Path, _options: RemoveOptions) -> Result<()> { + use windows::{ + core::HSTRING, + Storage::{StorageDeleteOption, StorageFile}, + }; + // todo(windows) + // When new version of `windows-rs` release, make this operation `async` + let path = path.canonicalize()?.to_string_lossy().to_string(); + let path_str = path.trim_start_matches("\\\\?\\"); + if path_str.is_empty() { + anyhow::bail!("File path is empty!"); + } + let file = StorageFile::GetFileFromPathAsync(&HSTRING::from(path_str))?.get()?; + file.DeleteAsync(StorageDeleteOption::Default)?.get()?; + Ok(()) + } + #[cfg(target_os = "macos")] async fn trash_dir(&self, path: &Path, options: 
RemoveOptions) -> Result<()> { self.trash_file(path, options).await @@ -352,6 +370,25 @@ impl Fs for RealFs { self.trash_file(path, options).await } + #[cfg(target_os = "windows")] + async fn trash_dir(&self, path: &Path, _options: RemoveOptions) -> Result<()> { + use windows::{ + core::HSTRING, + Storage::{StorageDeleteOption, StorageFolder}, + }; + + let path = path.canonicalize()?.to_string_lossy().to_string(); + let path_str = path.trim_start_matches("\\\\?\\"); + if path_str.is_empty() { + anyhow::bail!("Folder path is empty!"); + } + // todo(windows) + // When new version of `windows-rs` release, make this operation `async` + let folder = StorageFolder::GetFolderFromPathAsync(&HSTRING::from(path_str))?.get()?; + folder.DeleteAsync(StorageDeleteOption::Default)?.get()?; + Ok(()) + } + async fn open_sync(&self, path: &Path) -> Result> { Ok(Box::new(std::fs::File::open(path)?)) } From 461812d7b6de0064e2bffd224f82a95351d0f1cb Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Fri, 13 Sep 2024 04:15:20 +0800 Subject: [PATCH 062/762] windows: Use the existing `open_target` function for `platform::open_with_system` (#17705) Release Notes: - N/A --- crates/gpui/src/platform/windows/platform.rs | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index f8b3924e6282b1..934d9336d2f6c2 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -401,14 +401,19 @@ impl Platform for WindowsPlatform { } fn open_with_system(&self, path: &Path) { - let executor = self.background_executor().clone(); - let path = path.to_owned(); - executor + let Ok(full_path) = path.canonicalize() else { + log::error!("unable to parse file full path: {}", path.display()); + return; + }; + self.background_executor() .spawn(async move { - let _ = std::process::Command::new("cmd") - .args(&["/c", 
"start", "", path.to_str().expect("path to string")]) - .spawn() - .expect("Failed to open file"); + let Some(full_path_str) = full_path.to_str() else { + return; + }; + if full_path_str.is_empty() { + return; + }; + open_target(full_path_str); }) .detach(); } From 3613ebd93c137578d947b482fe108e81313a8b47 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Fri, 13 Sep 2024 04:55:59 +0800 Subject: [PATCH 063/762] editor: Fix an error when cut with vim visual line select (#17591) Becuause in vim visual mode, we will always select next char, hit [here](https://github.com/zed-industries/zed/blob/66ef31882341852229c74996867916fbd4a2fe2a/crates/vim/src/visual.rs#L174), when using editor method for `cut` this selection, will hit this error. Closes #17585 Release Notes: - N/A --------- Co-authored-by: Conrad Irwin --- crates/editor/src/editor.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 4792c6b2cb889d..515cde1908abb5 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6671,7 +6671,11 @@ impl Editor { let is_entire_line = selection.is_empty() || self.selections.line_mode; if is_entire_line { selection.start = Point::new(selection.start.row, 0); - selection.end = cmp::min(max_point, Point::new(selection.end.row + 1, 0)); + if !selection.is_empty() && selection.end.column == 0 { + selection.end = cmp::min(max_point, selection.end); + } else { + selection.end = cmp::min(max_point, Point::new(selection.end.row + 1, 0)); + } selection.goal = SelectionGoal::None; } if is_first { From 3aeea93847acd4c28f97d2c4de35364a98cb98e4 Mon Sep 17 00:00:00 2001 From: Albert Marashi Date: Fri, 13 Sep 2024 21:41:27 +0930 Subject: [PATCH 064/762] typescript: Highlight `is` predicate keyword & `...` spread pattern (#17787) Release Notes: - Fixed the `is` and `...` highlights for TypeScript --- crates/languages/src/tsx/highlights.scm | 1 + 
crates/languages/src/typescript/highlights.scm | 2 ++ 2 files changed, 3 insertions(+) diff --git a/crates/languages/src/tsx/highlights.scm b/crates/languages/src/tsx/highlights.scm index 296a66c10a6706..bbdd83bb4d4e94 100644 --- a/crates/languages/src/tsx/highlights.scm +++ b/crates/languages/src/tsx/highlights.scm @@ -181,6 +181,7 @@ "import" "in" "instanceof" + "is" "let" "new" "of" diff --git a/crates/languages/src/typescript/highlights.scm b/crates/languages/src/typescript/highlights.scm index 19def8d93dd00c..eedcf79aedac77 100644 --- a/crates/languages/src/typescript/highlights.scm +++ b/crates/languages/src/typescript/highlights.scm @@ -100,6 +100,7 @@ ] @punctuation.delimiter [ + "..." "-" "--" "-=" @@ -181,6 +182,7 @@ "import" "in" "instanceof" + "is" "let" "new" "of" From 93a3e8bc9478572cc07404d9ed3556b6842f00f2 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 13 Sep 2024 10:54:23 -0400 Subject: [PATCH 065/762] zed_extension_api: Fork new version of extension API (#17795) This PR forks a new version of the `zed_extension_api` in preparation for new changes. 
Release Notes: - N/A --------- Co-authored-by: Max --- Cargo.lock | 66 +-- crates/extension/build.rs | 9 +- crates/extension/src/wasm_host/wit.rs | 85 ++- .../src/wasm_host/wit/since_v0_1_0.rs | 229 +++----- .../src/wasm_host/wit/since_v0_2_0.rs | 551 ++++++++++++++++++ crates/extension_api/Cargo.toml | 5 +- .../extension_api/wit/since_v0.2.0/common.wit | 9 + .../wit/since_v0.2.0/extension.wit | 147 +++++ .../extension_api/wit/since_v0.2.0/github.wit | 33 ++ .../wit/since_v0.2.0/http-client.wit | 67 +++ crates/extension_api/wit/since_v0.2.0/lsp.wit | 83 +++ .../extension_api/wit/since_v0.2.0/nodejs.wit | 13 + .../wit/since_v0.2.0/platform.wit | 24 + .../wit/since_v0.2.0/settings.rs | 29 + .../wit/since_v0.2.0/slash-command.wit | 41 ++ extensions/test-extension/Cargo.toml | 2 +- 16 files changed, 1216 insertions(+), 177 deletions(-) create mode 100644 crates/extension/src/wasm_host/wit/since_v0_2_0.rs create mode 100644 crates/extension_api/wit/since_v0.2.0/common.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/extension.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/github.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/http-client.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/lsp.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/nodejs.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/platform.wit create mode 100644 crates/extension_api/wit/since_v0.2.0/settings.rs create mode 100644 crates/extension_api/wit/since_v0.2.0/slash-command.wit diff --git a/Cargo.lock b/Cargo.lock index 36691e72ab772a..5eaf3ddde1fda5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7873,7 +7873,7 @@ name = "perplexity" version = "0.1.0" dependencies = [ "serde", - "zed_extension_api 0.1.0", + "zed_extension_api 0.2.0", ] [[package]] @@ -10254,7 +10254,7 @@ dependencies = [ name = "slash_commands_example" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] @@ -14317,68 +14317,70 @@ name = "zed_astro" version = "0.1.0" dependencies = [ "serde", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_clojure" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_csharp" version = "0.0.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_dart" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_deno" version = "0.0.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_elixir" version = "0.0.9" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_elm" version = "0.0.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_emmet" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_erlang" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_extension_api" version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "594fd10dd0f2f853eb243e2425e7c95938cef49adb81d9602921d002c5e6d9d9" dependencies = [ "serde", 
"serde_json", @@ -14387,9 +14389,7 @@ dependencies = [ [[package]] name = "zed_extension_api" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "594fd10dd0f2f853eb243e2425e7c95938cef49adb81d9602921d002c5e6d9d9" +version = "0.2.0" dependencies = [ "serde", "serde_json", @@ -14401,77 +14401,77 @@ name = "zed_gleam" version = "0.2.0" dependencies = [ "html_to_markdown 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_glsl" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_haskell" version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_html" version = "0.1.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_lua" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_ocaml" version = "0.0.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_php" version = "0.2.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_prisma" version = "0.0.3" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_purescript" version = "0.0.1" dependencies = [ - "zed_extension_api 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_ruby" version = "0.2.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_ruff" version = "0.0.2" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] @@ -14479,42 +14479,42 @@ name = "zed_snippets" version = "0.0.5" dependencies = [ "serde_json", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_svelte" version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_terraform" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_test_extension" version = "0.1.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.2.0", ] [[package]] name = "zed_toml" version = "0.1.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_uiua" version = "0.0.1" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] @@ -14522,14 +14522,14 @@ name = "zed_vue" version = "0.1.0" dependencies = [ "serde", - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] [[package]] name = "zed_zig" version = "0.3.0" dependencies = [ - "zed_extension_api 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "zed_extension_api 0.1.0", ] 
[[package]] diff --git a/crates/extension/build.rs b/crates/extension/build.rs index c5f94abaa868bf..f2c2b19998d054 100644 --- a/crates/extension/build.rs +++ b/crates/extension/build.rs @@ -6,17 +6,21 @@ fn main() -> Result<(), Box> { copy_extension_api_rust_files() } -// rust-analyzer doesn't support include! for files from outside the crate. -// Copy them to the OUT_DIR, so we can include them from there, which is supported. +/// rust-analyzer doesn't support include! for files from outside the crate. +/// Copy them to the OUT_DIR, so we can include them from there, which is supported. fn copy_extension_api_rust_files() -> Result<(), Box> { let out_dir = env::var("OUT_DIR")?; let input_dir = PathBuf::from("../extension_api/wit"); let output_dir = PathBuf::from(out_dir); + println!("cargo:rerun-if-changed={}", input_dir.display()); + for entry in fs::read_dir(&input_dir)? { let entry = entry?; let path = entry.path(); if path.is_dir() { + println!("cargo:rerun-if-changed={}", path.display()); + for subentry in fs::read_dir(&path)? 
{ let subentry = subentry?; let subpath = subentry.path(); @@ -26,7 +30,6 @@ fn copy_extension_api_rust_files() -> Result<(), Box> { fs::create_dir_all(destination.parent().unwrap())?; fs::copy(&subpath, &destination)?; - println!("cargo:rerun-if-changed={}", subpath.display()); } } } else if path.extension() == Some(std::ffi::OsStr::new("rs")) { diff --git a/crates/extension/src/wasm_host/wit.rs b/crates/extension/src/wasm_host/wit.rs index 7c7d71be3a0fe5..1c3cdd77f64eea 100644 --- a/crates/extension/src/wasm_host/wit.rs +++ b/crates/extension/src/wasm_host/wit.rs @@ -2,9 +2,10 @@ mod since_v0_0_1; mod since_v0_0_4; mod since_v0_0_6; mod since_v0_1_0; +mod since_v0_2_0; use indexed_docs::IndexedDocsDatabase; use release_channel::ReleaseChannel; -use since_v0_1_0 as latest; +use since_v0_2_0 as latest; use super::{wasm_engine, WasmState}; use anyhow::{anyhow, Context, Result}; @@ -52,10 +53,16 @@ pub fn wasm_api_version_range(release_channel: ReleaseChannel) -> RangeInclusive // Note: The release channel can be used to stage a new version of the extension API. let _ = release_channel; - since_v0_0_1::MIN_VERSION..=latest::MAX_VERSION + let max_version = match release_channel { + ReleaseChannel::Dev | ReleaseChannel::Nightly => latest::MAX_VERSION, + ReleaseChannel::Stable | ReleaseChannel::Preview => since_v0_1_0::MAX_VERSION, + }; + + since_v0_0_1::MIN_VERSION..=max_version } pub enum Extension { + V020(since_v0_2_0::Extension), V010(since_v0_1_0::Extension), V006(since_v0_0_6::Extension), V004(since_v0_0_4::Extension), @@ -72,11 +79,25 @@ impl Extension { // Note: The release channel can be used to stage a new version of the extension API. 
let _ = release_channel; - if version >= latest::MIN_VERSION { + let allow_latest_version = match release_channel { + ReleaseChannel::Dev | ReleaseChannel::Nightly => true, + ReleaseChannel::Stable | ReleaseChannel::Preview => false, + }; + + if allow_latest_version && version >= latest::MIN_VERSION { let (extension, instance) = latest::Extension::instantiate_async(store, component, latest::linker()) .await .context("failed to instantiate wasm extension")?; + Ok((Self::V020(extension), instance)) + } else if version >= since_v0_1_0::MIN_VERSION { + let (extension, instance) = since_v0_1_0::Extension::instantiate_async( + store, + component, + since_v0_1_0::linker(), + ) + .await + .context("failed to instantiate wasm extension")?; Ok((Self::V010(extension), instance)) } else if version >= since_v0_0_6::MIN_VERSION { let (extension, instance) = since_v0_0_6::Extension::instantiate_async( @@ -110,6 +131,7 @@ impl Extension { pub async fn call_init_extension(&self, store: &mut Store) -> Result<()> { match self { + Extension::V020(ext) => ext.call_init_extension(store).await, Extension::V010(ext) => ext.call_init_extension(store).await, Extension::V006(ext) => ext.call_init_extension(store).await, Extension::V004(ext) => ext.call_init_extension(store).await, @@ -125,10 +147,14 @@ impl Extension { resource: Resource>, ) -> Result> { match self { - Extension::V010(ext) => { + Extension::V020(ext) => { ext.call_language_server_command(store, &language_server_id.0, resource) .await } + Extension::V010(ext) => Ok(ext + .call_language_server_command(store, &language_server_id.0, resource) + .await? + .map(|command| command.into())), Extension::V006(ext) => Ok(ext .call_language_server_command(store, &language_server_id.0, resource) .await? 
@@ -152,6 +178,14 @@ impl Extension { resource: Resource>, ) -> Result, String>> { match self { + Extension::V020(ext) => { + ext.call_language_server_initialization_options( + store, + &language_server_id.0, + resource, + ) + .await + } Extension::V010(ext) => { ext.call_language_server_initialization_options( store, @@ -190,6 +224,14 @@ impl Extension { resource: Resource>, ) -> Result, String>> { match self { + Extension::V020(ext) => { + ext.call_language_server_workspace_configuration( + store, + &language_server_id.0, + resource, + ) + .await + } Extension::V010(ext) => { ext.call_language_server_workspace_configuration( store, @@ -217,10 +259,19 @@ impl Extension { completions: Vec, ) -> Result>, String>> { match self { - Extension::V010(ext) => { + Extension::V020(ext) => { ext.call_labels_for_completions(store, &language_server_id.0, &completions) .await } + Extension::V010(ext) => Ok(ext + .call_labels_for_completions(store, &language_server_id.0, &completions) + .await? + .map(|labels| { + labels + .into_iter() + .map(|label| label.map(Into::into)) + .collect() + })), Extension::V006(ext) => Ok(ext .call_labels_for_completions(store, &language_server_id.0, &completions) .await? @@ -241,10 +292,19 @@ impl Extension { symbols: Vec, ) -> Result>, String>> { match self { - Extension::V010(ext) => { + Extension::V020(ext) => { ext.call_labels_for_symbols(store, &language_server_id.0, &symbols) .await } + Extension::V010(ext) => Ok(ext + .call_labels_for_symbols(store, &language_server_id.0, &symbols) + .await? + .map(|labels| { + labels + .into_iter() + .map(|label| label.map(Into::into)) + .collect() + })), Extension::V006(ext) => Ok(ext .call_labels_for_symbols(store, &language_server_id.0, &symbols) .await? 
@@ -265,6 +325,10 @@ impl Extension { arguments: &[String], ) -> Result, String>> { match self { + Extension::V020(ext) => { + ext.call_complete_slash_command_argument(store, command, arguments) + .await + } Extension::V010(ext) => { ext.call_complete_slash_command_argument(store, command, arguments) .await @@ -281,6 +345,10 @@ impl Extension { resource: Option>>, ) -> Result> { match self { + Extension::V020(ext) => { + ext.call_run_slash_command(store, command, arguments, resource) + .await + } Extension::V010(ext) => { ext.call_run_slash_command(store, command, arguments, resource) .await @@ -297,6 +365,7 @@ impl Extension { provider: &str, ) -> Result, String>> { match self { + Extension::V020(ext) => ext.call_suggest_docs_packages(store, provider).await, Extension::V010(ext) => ext.call_suggest_docs_packages(store, provider).await, Extension::V001(_) | Extension::V004(_) | Extension::V006(_) => Err(anyhow!( "`suggest_docs_packages` not available prior to v0.1.0" @@ -312,6 +381,10 @@ impl Extension { database: Resource>, ) -> Result> { match self { + Extension::V020(ext) => { + ext.call_index_docs(store, provider, package_name, database) + .await + } Extension::V010(ext) => { ext.call_index_docs(store, provider, package_name, database) .await diff --git a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs index 337bb8afb04a1d..88d860391a477b 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs @@ -16,13 +16,14 @@ use language::{ use project::project_settings::ProjectSettings; use semantic_version::SemanticVersion; use std::{ - env, path::{Path, PathBuf}, sync::{Arc, OnceLock}, }; use util::maybe; use wasmtime::component::{Linker, Resource}; +use super::latest; + pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 1, 0); pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 1, 0); @@ -33,7 +34,12 @@ 
wasmtime::component::bindgen!({ with: { "worktree": ExtensionWorktree, "key-value-store": ExtensionKeyValueStore, - "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream + "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream, + "zed:extension/github": latest::zed::extension::github, + "zed:extension/lsp": latest::zed::extension::lsp, + "zed:extension/nodejs": latest::zed::extension::nodejs, + "zed:extension/platform": latest::zed::extension::platform, + "zed:extension/slash-command": latest::zed::extension::slash_command, }, }); @@ -49,7 +55,94 @@ pub type ExtensionHttpResponseStream = Arc &'static Linker { static LINKER: OnceLock> = OnceLock::new(); - LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker)) + LINKER.get_or_init(|| { + super::new_linker(|linker, f| { + Extension::add_to_linker(linker, f)?; + latest::zed::extension::github::add_to_linker(linker, f)?; + latest::zed::extension::nodejs::add_to_linker(linker, f)?; + latest::zed::extension::platform::add_to_linker(linker, f)?; + latest::zed::extension::slash_command::add_to_linker(linker, f)?; + Ok(()) + }) + }) +} + +impl From for latest::Command { + fn from(value: Command) -> Self { + Self { + command: value.command, + args: value.args, + env: value.env, + } + } +} + +impl From for latest::SettingsLocation { + fn from(value: SettingsLocation) -> Self { + Self { + worktree_id: value.worktree_id, + path: value.path, + } + } +} + +impl From for latest::LanguageServerInstallationStatus { + fn from(value: LanguageServerInstallationStatus) -> Self { + match value { + LanguageServerInstallationStatus::None => Self::None, + LanguageServerInstallationStatus::Downloading => Self::Downloading, + LanguageServerInstallationStatus::CheckingForUpdate => Self::CheckingForUpdate, + LanguageServerInstallationStatus::Failed(message) => Self::Failed(message), + } + } +} + +impl From for latest::DownloadedFileType { + fn from(value: DownloadedFileType) -> Self { + 
match value { + DownloadedFileType::Gzip => Self::Gzip, + DownloadedFileType::GzipTar => Self::GzipTar, + DownloadedFileType::Zip => Self::Zip, + DownloadedFileType::Uncompressed => Self::Uncompressed, + } + } +} + +impl From for latest::Range { + fn from(value: Range) -> Self { + Self { + start: value.start, + end: value.end, + } + } +} + +impl From for latest::CodeLabelSpan { + fn from(value: CodeLabelSpan) -> Self { + match value { + CodeLabelSpan::CodeRange(range) => Self::CodeRange(range.into()), + CodeLabelSpan::Literal(literal) => Self::Literal(literal.into()), + } + } +} + +impl From for latest::CodeLabelSpanLiteral { + fn from(value: CodeLabelSpanLiteral) -> Self { + Self { + text: value.text, + highlight_name: value.highlight_name, + } + } +} + +impl From for latest::CodeLabel { + fn from(value: CodeLabel) -> Self { + Self { + code: value.code, + spans: value.spans.into_iter().map(Into::into).collect(), + filter_range: value.filter_range.into(), + } + } } #[async_trait] @@ -251,136 +344,6 @@ async fn convert_response( Ok(extension_response) } -#[async_trait] -impl nodejs::Host for WasmState { - async fn node_binary_path(&mut self) -> wasmtime::Result> { - self.host - .node_runtime - .binary_path() - .await - .map(|path| path.to_string_lossy().to_string()) - .to_wasmtime_result() - } - - async fn npm_package_latest_version( - &mut self, - package_name: String, - ) -> wasmtime::Result> { - self.host - .node_runtime - .npm_package_latest_version(&package_name) - .await - .to_wasmtime_result() - } - - async fn npm_package_installed_version( - &mut self, - package_name: String, - ) -> wasmtime::Result, String>> { - self.host - .node_runtime - .npm_package_installed_version(&self.work_dir(), &package_name) - .await - .to_wasmtime_result() - } - - async fn npm_install_package( - &mut self, - package_name: String, - version: String, - ) -> wasmtime::Result> { - self.host - .node_runtime - .npm_install_packages(&self.work_dir(), &[(&package_name, &version)]) - 
.await - .to_wasmtime_result() - } -} - -#[async_trait] -impl lsp::Host for WasmState {} - -impl From<::http_client::github::GithubRelease> for github::GithubRelease { - fn from(value: ::http_client::github::GithubRelease) -> Self { - Self { - version: value.tag_name, - assets: value.assets.into_iter().map(Into::into).collect(), - } - } -} - -impl From<::http_client::github::GithubReleaseAsset> for github::GithubReleaseAsset { - fn from(value: ::http_client::github::GithubReleaseAsset) -> Self { - Self { - name: value.name, - download_url: value.browser_download_url, - } - } -} - -#[async_trait] -impl github::Host for WasmState { - async fn latest_github_release( - &mut self, - repo: String, - options: github::GithubReleaseOptions, - ) -> wasmtime::Result> { - maybe!(async { - let release = ::http_client::github::latest_github_release( - &repo, - options.require_assets, - options.pre_release, - self.host.http_client.clone(), - ) - .await?; - Ok(release.into()) - }) - .await - .to_wasmtime_result() - } - - async fn github_release_by_tag_name( - &mut self, - repo: String, - tag: String, - ) -> wasmtime::Result> { - maybe!(async { - let release = ::http_client::github::get_release_by_tag_name( - &repo, - &tag, - self.host.http_client.clone(), - ) - .await?; - Ok(release.into()) - }) - .await - .to_wasmtime_result() - } -} - -#[async_trait] -impl platform::Host for WasmState { - async fn current_platform(&mut self) -> Result<(platform::Os, platform::Architecture)> { - Ok(( - match env::consts::OS { - "macos" => platform::Os::Mac, - "linux" => platform::Os::Linux, - "windows" => platform::Os::Windows, - _ => panic!("unsupported os"), - }, - match env::consts::ARCH { - "aarch64" => platform::Architecture::Aarch64, - "x86" => platform::Architecture::X86, - "x86_64" => platform::Architecture::X8664, - _ => panic!("unsupported architecture"), - }, - )) - } -} - -#[async_trait] -impl slash_command::Host for WasmState {} - #[async_trait] impl ExtensionImports for WasmState { 
async fn get_settings( diff --git a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs new file mode 100644 index 00000000000000..7fa79c2544475b --- /dev/null +++ b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs @@ -0,0 +1,551 @@ +use crate::wasm_host::{wit::ToWasmtimeResult, WasmState}; +use ::http_client::AsyncBody; +use ::settings::{Settings, WorktreeId}; +use anyhow::{anyhow, bail, Context, Result}; +use async_compression::futures::bufread::GzipDecoder; +use async_tar::Archive; +use async_trait::async_trait; +use futures::{io::BufReader, FutureExt as _}; +use futures::{lock::Mutex, AsyncReadExt}; +use indexed_docs::IndexedDocsDatabase; +use isahc::config::{Configurable, RedirectPolicy}; +use language::LanguageName; +use language::{ + language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, +}; +use project::project_settings::ProjectSettings; +use semantic_version::SemanticVersion; +use std::{ + env, + path::{Path, PathBuf}, + sync::{Arc, OnceLock}, +}; +use util::maybe; +use wasmtime::component::{Linker, Resource}; + +pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 2, 0); +pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 2, 0); + +wasmtime::component::bindgen!({ + async: true, + trappable_imports: true, + path: "../extension_api/wit/since_v0.2.0", + with: { + "worktree": ExtensionWorktree, + "key-value-store": ExtensionKeyValueStore, + "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream + }, +}); + +pub use self::zed::extension::*; + +mod settings { + include!(concat!(env!("OUT_DIR"), "/since_v0.2.0/settings.rs")); +} + +pub type ExtensionWorktree = Arc; +pub type ExtensionKeyValueStore = Arc; +pub type ExtensionHttpResponseStream = Arc>>; + +pub fn linker() -> &'static Linker { + static LINKER: OnceLock> = OnceLock::new(); + LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker)) +} + +#[async_trait] 
+impl HostKeyValueStore for WasmState { + async fn insert( + &mut self, + kv_store: Resource, + key: String, + value: String, + ) -> wasmtime::Result> { + let kv_store = self.table.get(&kv_store)?; + kv_store.insert(key, value).await.to_wasmtime_result() + } + + fn drop(&mut self, _worktree: Resource) -> Result<()> { + // We only ever hand out borrows of key-value stores. + Ok(()) + } +} + +#[async_trait] +impl HostWorktree for WasmState { + async fn id( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.worktree_id().to_proto()) + } + + async fn root_path( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.worktree_root_path().to_string_lossy().to_string()) + } + + async fn read_text_file( + &mut self, + delegate: Resource>, + path: String, + ) -> wasmtime::Result> { + let delegate = self.table.get(&delegate)?; + Ok(delegate + .read_text_file(path.into()) + .await + .map_err(|error| error.to_string())) + } + + async fn shell_env( + &mut self, + delegate: Resource>, + ) -> wasmtime::Result { + let delegate = self.table.get(&delegate)?; + Ok(delegate.shell_env().await.into_iter().collect()) + } + + async fn which( + &mut self, + delegate: Resource>, + binary_name: String, + ) -> wasmtime::Result> { + let delegate = self.table.get(&delegate)?; + Ok(delegate + .which(binary_name.as_ref()) + .await + .map(|path| path.to_string_lossy().to_string())) + } + + fn drop(&mut self, _worktree: Resource) -> Result<()> { + // We only ever hand out borrows of worktrees. 
+ Ok(()) + } +} + +#[async_trait] +impl common::Host for WasmState {} + +#[async_trait] +impl http_client::Host for WasmState { + async fn fetch( + &mut self, + request: http_client::HttpRequest, + ) -> wasmtime::Result> { + maybe!(async { + let url = &request.url; + let request = convert_request(&request)?; + let mut response = self.host.http_client.send(request).await?; + + if response.status().is_client_error() || response.status().is_server_error() { + bail!("failed to fetch '{url}': status code {}", response.status()) + } + convert_response(&mut response).await + }) + .await + .to_wasmtime_result() + } + + async fn fetch_stream( + &mut self, + request: http_client::HttpRequest, + ) -> wasmtime::Result, String>> { + let request = convert_request(&request)?; + let response = self.host.http_client.send(request); + maybe!(async { + let response = response.await?; + let stream = Arc::new(Mutex::new(response)); + let resource = self.table.push(stream)?; + Ok(resource) + }) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl http_client::HostHttpResponseStream for WasmState { + async fn next_chunk( + &mut self, + resource: Resource, + ) -> wasmtime::Result>, String>> { + let stream = self.table.get(&resource)?.clone(); + maybe!(async move { + let mut response = stream.lock().await; + let mut buffer = vec![0; 8192]; // 8KB buffer + let bytes_read = response.body_mut().read(&mut buffer).await?; + if bytes_read == 0 { + Ok(None) + } else { + buffer.truncate(bytes_read); + Ok(Some(buffer)) + } + }) + .await + .to_wasmtime_result() + } + + fn drop(&mut self, _resource: Resource) -> Result<()> { + Ok(()) + } +} + +impl From for ::http_client::Method { + fn from(value: http_client::HttpMethod) -> Self { + match value { + http_client::HttpMethod::Get => Self::GET, + http_client::HttpMethod::Post => Self::POST, + http_client::HttpMethod::Put => Self::PUT, + http_client::HttpMethod::Delete => Self::DELETE, + http_client::HttpMethod::Head => Self::HEAD, + 
http_client::HttpMethod::Options => Self::OPTIONS, + http_client::HttpMethod::Patch => Self::PATCH, + } + } +} + +fn convert_request( + extension_request: &http_client::HttpRequest, +) -> Result<::http_client::Request, anyhow::Error> { + let mut request = ::http_client::Request::builder() + .method(::http_client::Method::from(extension_request.method)) + .uri(&extension_request.url) + .redirect_policy(match extension_request.redirect_policy { + http_client::RedirectPolicy::NoFollow => RedirectPolicy::None, + http_client::RedirectPolicy::FollowLimit(limit) => RedirectPolicy::Limit(limit), + http_client::RedirectPolicy::FollowAll => RedirectPolicy::Follow, + }); + for (key, value) in &extension_request.headers { + request = request.header(key, value); + } + let body = extension_request + .body + .clone() + .map(AsyncBody::from) + .unwrap_or_default(); + request.body(body).map_err(anyhow::Error::from) +} + +async fn convert_response( + response: &mut ::http_client::Response, +) -> Result { + let mut extension_response = http_client::HttpResponse { + body: Vec::new(), + headers: Vec::new(), + }; + + for (key, value) in response.headers() { + extension_response + .headers + .push((key.to_string(), value.to_str().unwrap_or("").to_string())); + } + + response + .body_mut() + .read_to_end(&mut extension_response.body) + .await?; + + Ok(extension_response) +} + +#[async_trait] +impl nodejs::Host for WasmState { + async fn node_binary_path(&mut self) -> wasmtime::Result> { + self.host + .node_runtime + .binary_path() + .await + .map(|path| path.to_string_lossy().to_string()) + .to_wasmtime_result() + } + + async fn npm_package_latest_version( + &mut self, + package_name: String, + ) -> wasmtime::Result> { + self.host + .node_runtime + .npm_package_latest_version(&package_name) + .await + .to_wasmtime_result() + } + + async fn npm_package_installed_version( + &mut self, + package_name: String, + ) -> wasmtime::Result, String>> { + self.host + .node_runtime + 
.npm_package_installed_version(&self.work_dir(), &package_name) + .await + .to_wasmtime_result() + } + + async fn npm_install_package( + &mut self, + package_name: String, + version: String, + ) -> wasmtime::Result> { + self.host + .node_runtime + .npm_install_packages(&self.work_dir(), &[(&package_name, &version)]) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl lsp::Host for WasmState {} + +impl From<::http_client::github::GithubRelease> for github::GithubRelease { + fn from(value: ::http_client::github::GithubRelease) -> Self { + Self { + version: value.tag_name, + assets: value.assets.into_iter().map(Into::into).collect(), + } + } +} + +impl From<::http_client::github::GithubReleaseAsset> for github::GithubReleaseAsset { + fn from(value: ::http_client::github::GithubReleaseAsset) -> Self { + Self { + name: value.name, + download_url: value.browser_download_url, + } + } +} + +#[async_trait] +impl github::Host for WasmState { + async fn latest_github_release( + &mut self, + repo: String, + options: github::GithubReleaseOptions, + ) -> wasmtime::Result> { + maybe!(async { + let release = ::http_client::github::latest_github_release( + &repo, + options.require_assets, + options.pre_release, + self.host.http_client.clone(), + ) + .await?; + Ok(release.into()) + }) + .await + .to_wasmtime_result() + } + + async fn github_release_by_tag_name( + &mut self, + repo: String, + tag: String, + ) -> wasmtime::Result> { + maybe!(async { + let release = ::http_client::github::get_release_by_tag_name( + &repo, + &tag, + self.host.http_client.clone(), + ) + .await?; + Ok(release.into()) + }) + .await + .to_wasmtime_result() + } +} + +#[async_trait] +impl platform::Host for WasmState { + async fn current_platform(&mut self) -> Result<(platform::Os, platform::Architecture)> { + Ok(( + match env::consts::OS { + "macos" => platform::Os::Mac, + "linux" => platform::Os::Linux, + "windows" => platform::Os::Windows, + _ => panic!("unsupported os"), + }, + match 
env::consts::ARCH { + "aarch64" => platform::Architecture::Aarch64, + "x86" => platform::Architecture::X86, + "x86_64" => platform::Architecture::X8664, + _ => panic!("unsupported architecture"), + }, + )) + } +} + +#[async_trait] +impl slash_command::Host for WasmState {} + +#[async_trait] +impl ExtensionImports for WasmState { + async fn get_settings( + &mut self, + location: Option, + category: String, + key: Option, + ) -> wasmtime::Result> { + self.on_main_thread(|cx| { + async move { + let location = location + .as_ref() + .map(|location| ::settings::SettingsLocation { + worktree_id: WorktreeId::from_proto(location.worktree_id), + path: Path::new(&location.path), + }); + + cx.update(|cx| match category.as_str() { + "language" => { + let key = key.map(|k| LanguageName::new(&k)); + let settings = + AllLanguageSettings::get(location, cx).language(key.as_ref()); + Ok(serde_json::to_string(&settings::LanguageSettings { + tab_size: settings.tab_size, + })?) + } + "lsp" => { + let settings = key + .and_then(|key| { + ProjectSettings::get(location, cx) + .lsp + .get(&Arc::::from(key)) + }) + .cloned() + .unwrap_or_default(); + Ok(serde_json::to_string(&settings::LspSettings { + binary: settings.binary.map(|binary| settings::BinarySettings { + path: binary.path, + arguments: binary.arguments, + }), + settings: settings.settings, + initialization_options: settings.initialization_options, + })?) + } + _ => { + bail!("Unknown settings category: {}", category); + } + }) + } + .boxed_local() + }) + .await? 
+ .to_wasmtime_result() + } + + async fn set_language_server_installation_status( + &mut self, + server_name: String, + status: LanguageServerInstallationStatus, + ) -> wasmtime::Result<()> { + let status = match status { + LanguageServerInstallationStatus::CheckingForUpdate => { + LanguageServerBinaryStatus::CheckingForUpdate + } + LanguageServerInstallationStatus::Downloading => { + LanguageServerBinaryStatus::Downloading + } + LanguageServerInstallationStatus::None => LanguageServerBinaryStatus::None, + LanguageServerInstallationStatus::Failed(error) => { + LanguageServerBinaryStatus::Failed { error } + } + }; + + self.host + .language_registry + .update_lsp_status(language::LanguageServerName(server_name.into()), status); + Ok(()) + } + + async fn download_file( + &mut self, + url: String, + path: String, + file_type: DownloadedFileType, + ) -> wasmtime::Result> { + maybe!(async { + let path = PathBuf::from(path); + let extension_work_dir = self.host.work_dir.join(self.manifest.id.as_ref()); + + self.host.fs.create_dir(&extension_work_dir).await?; + + let destination_path = self + .host + .writeable_path_from_extension(&self.manifest.id, &path)?; + + let mut response = self + .host + .http_client + .get(&url, Default::default(), true) + .await + .map_err(|err| anyhow!("error downloading release: {}", err))?; + + if !response.status().is_success() { + Err(anyhow!( + "download failed with status {}", + response.status().to_string() + ))?; + } + let body = BufReader::new(response.body_mut()); + + match file_type { + DownloadedFileType::Uncompressed => { + futures::pin_mut!(body); + self.host + .fs + .create_file_with(&destination_path, body) + .await?; + } + DownloadedFileType::Gzip => { + let body = GzipDecoder::new(body); + futures::pin_mut!(body); + self.host + .fs + .create_file_with(&destination_path, body) + .await?; + } + DownloadedFileType::GzipTar => { + let body = GzipDecoder::new(body); + futures::pin_mut!(body); + self.host + .fs + 
.extract_tar_file(&destination_path, Archive::new(body)) + .await?; + } + DownloadedFileType::Zip => { + futures::pin_mut!(body); + node_runtime::extract_zip(&destination_path, body) + .await + .with_context(|| format!("failed to unzip {} archive", path.display()))?; + } + } + + Ok(()) + }) + .await + .to_wasmtime_result() + } + + async fn make_file_executable(&mut self, path: String) -> wasmtime::Result> { + #[allow(unused)] + let path = self + .host + .writeable_path_from_extension(&self.manifest.id, Path::new(&path))?; + + #[cfg(unix)] + { + use std::fs::{self, Permissions}; + use std::os::unix::fs::PermissionsExt; + + return fs::set_permissions(&path, Permissions::from_mode(0o755)) + .map_err(|error| anyhow!("failed to set permissions for path {path:?}: {error}")) + .to_wasmtime_result(); + } + + #[cfg(not(unix))] + Ok(Ok(())) + } +} diff --git a/crates/extension_api/Cargo.toml b/crates/extension_api/Cargo.toml index 89d7ed947be34f..1a2b25b0f6a1d8 100644 --- a/crates/extension_api/Cargo.toml +++ b/crates/extension_api/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_extension_api" -version = "0.1.0" +version = "0.2.0" description = "APIs for creating Zed extensions in Rust" repository = "https://github.com/zed-industries/zed" documentation = "https://docs.rs/zed_extension_api" @@ -8,6 +8,9 @@ keywords = ["zed", "extension"] edition = "2021" license = "Apache-2.0" +# Remove when we're ready to publish v0.2.0. +publish = false + [lints] workspace = true diff --git a/crates/extension_api/wit/since_v0.2.0/common.wit b/crates/extension_api/wit/since_v0.2.0/common.wit new file mode 100644 index 00000000000000..c4f321f4c70a11 --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/common.wit @@ -0,0 +1,9 @@ +interface common { + /// A (half-open) range (`[start, end)`). + record range { + /// The start of the range (inclusive). + start: u32, + /// The end of the range (exclusive). 
+ end: u32, + } +} diff --git a/crates/extension_api/wit/since_v0.2.0/extension.wit b/crates/extension_api/wit/since_v0.2.0/extension.wit new file mode 100644 index 00000000000000..c7599f93ffd55d --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/extension.wit @@ -0,0 +1,147 @@ +package zed:extension; + +world extension { + import github; + import http-client; + import platform; + import nodejs; + + use common.{range}; + use lsp.{completion, symbol}; + use slash-command.{slash-command, slash-command-argument-completion, slash-command-output}; + + /// Initializes the extension. + export init-extension: func(); + + /// The type of a downloaded file. + enum downloaded-file-type { + /// A gzipped file (`.gz`). + gzip, + /// A gzipped tar archive (`.tar.gz`). + gzip-tar, + /// A ZIP file (`.zip`). + zip, + /// An uncompressed file. + uncompressed, + } + + /// The installation status for a language server. + variant language-server-installation-status { + /// The language server has no installation status. + none, + /// The language server is being downloaded. + downloading, + /// The language server is checking for updates. + checking-for-update, + /// The language server installation failed for specified reason. + failed(string), + } + + record settings-location { + worktree-id: u64, + path: string, + } + + import get-settings: func(path: option, category: string, key: option) -> result; + + /// Downloads a file from the given URL and saves it to the given path within the extension's + /// working directory. + /// + /// The file will be extracted according to the given file type. + import download-file: func(url: string, file-path: string, file-type: downloaded-file-type) -> result<_, string>; + + /// Makes the file at the given path executable. + import make-file-executable: func(filepath: string) -> result<_, string>; + + /// Updates the installation status for the given language server. 
+ import set-language-server-installation-status: func(language-server-name: string, status: language-server-installation-status); + + /// A list of environment variables. + type env-vars = list>; + + /// A command. + record command { + /// The command to execute. + command: string, + /// The arguments to pass to the command. + args: list, + /// The environment variables to set for the command. + env: env-vars, + } + + /// A Zed worktree. + resource worktree { + /// Returns the ID of the worktree. + id: func() -> u64; + /// Returns the root path of the worktree. + root-path: func() -> string; + /// Returns the textual contents of the specified file in the worktree. + read-text-file: func(path: string) -> result; + /// Returns the path to the given binary name, if one is present on the `$PATH`. + which: func(binary-name: string) -> option; + /// Returns the current shell environment. + shell-env: func() -> env-vars; + } + + /// A key-value store. + resource key-value-store { + /// Inserts an entry under the specified key. + insert: func(key: string, value: string) -> result<_, string>; + } + + /// Returns the command used to start up the language server. + export language-server-command: func(language-server-id: string, worktree: borrow) -> result; + + /// Returns the initialization options to pass to the language server on startup. + /// + /// The initialization options are represented as a JSON string. + export language-server-initialization-options: func(language-server-id: string, worktree: borrow) -> result, string>; + + /// Returns the workspace configuration options to pass to the language server. + export language-server-workspace-configuration: func(language-server-id: string, worktree: borrow) -> result, string>; + + /// A label containing some code. + record code-label { + /// The source code to parse with Tree-sitter. + code: string, + /// The spans to display in the label. + spans: list, + /// The range of the displayed label to include when filtering. 
+ filter-range: range, + } + + /// A span within a code label. + variant code-label-span { + /// A range into the parsed code. + code-range(range), + /// A span containing a code literal. + literal(code-label-span-literal), + } + + /// A span containing a code literal. + record code-label-span-literal { + /// The literal text. + text: string, + /// The name of the highlight to use for this literal. + highlight-name: option, + } + + export labels-for-completions: func(language-server-id: string, completions: list) -> result>, string>; + export labels-for-symbols: func(language-server-id: string, symbols: list) -> result>, string>; + + /// Returns the completions that should be shown when completing the provided slash command with the given query. + export complete-slash-command-argument: func(command: slash-command, args: list) -> result, string>; + + /// Returns the output from running the provided slash command. + export run-slash-command: func(command: slash-command, args: list, worktree: option>) -> result; + + /// Returns a list of packages as suggestions to be included in the `/docs` + /// search results. + /// + /// This can be used to provide completions for known packages (e.g., from the + /// local project or a registry) before a package has been indexed. + export suggest-docs-packages: func(provider-name: string) -> result, string>; + + /// Indexes the docs for the specified package. + export index-docs: func(provider-name: string, package-name: string, database: borrow) -> result<_, string>; +} diff --git a/crates/extension_api/wit/since_v0.2.0/github.wit b/crates/extension_api/wit/since_v0.2.0/github.wit new file mode 100644 index 00000000000000..bb138f5d31bf8e --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/github.wit @@ -0,0 +1,33 @@ +interface github { + /// A GitHub release. + record github-release { + /// The version of the release. + version: string, + /// The list of assets attached to the release. 
+ assets: list, + } + + /// An asset from a GitHub release. + record github-release-asset { + /// The name of the asset. + name: string, + /// The download URL for the asset. + download-url: string, + } + + /// The options used to filter down GitHub releases. + record github-release-options { + /// Whether releases without assets should be included. + require-assets: bool, + /// Whether pre-releases should be included. + pre-release: bool, + } + + /// Returns the latest release for the given GitHub repository. + latest-github-release: func(repo: string, options: github-release-options) -> result; + + /// Returns the GitHub release with the specified tag name for the given GitHub repository. + /// + /// Returns an error if a release with the given tag name does not exist. + github-release-by-tag-name: func(repo: string, tag: string) -> result; +} diff --git a/crates/extension_api/wit/since_v0.2.0/http-client.wit b/crates/extension_api/wit/since_v0.2.0/http-client.wit new file mode 100644 index 00000000000000..bb0206c17a52d4 --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/http-client.wit @@ -0,0 +1,67 @@ +interface http-client { + /// An HTTP request. + record http-request { + /// The HTTP method for the request. + method: http-method, + /// The URL to which the request should be made. + url: string, + /// The headers for the request. + headers: list>, + /// The request body. + body: option>, + /// The policy to use for redirects. + redirect-policy: redirect-policy, + } + + /// HTTP methods. + enum http-method { + /// `GET` + get, + /// `HEAD` + head, + /// `POST` + post, + /// `PUT` + put, + /// `DELETE` + delete, + /// `OPTIONS` + options, + /// `PATCH` + patch, + } + + /// The policy for dealing with redirects received from the server. + variant redirect-policy { + /// Redirects from the server will not be followed. + /// + /// This is the default behavior. + no-follow, + /// Redirects from the server will be followed up to the specified limit. 
+ follow-limit(u32), + /// All redirects from the server will be followed. + follow-all, + } + + /// An HTTP response. + record http-response { + /// The response headers. + headers: list>, + /// The response body. + body: list, + } + + /// Performs an HTTP request and returns the response. + fetch: func(req: http-request) -> result; + + /// An HTTP response stream. + resource http-response-stream { + /// Retrieves the next chunk of data from the response stream. + /// + /// Returns `Ok(None)` if the stream has ended. + next-chunk: func() -> result>, string>; + } + + /// Performs an HTTP request and returns a response stream. + fetch-stream: func(req: http-request) -> result; +} diff --git a/crates/extension_api/wit/since_v0.2.0/lsp.wit b/crates/extension_api/wit/since_v0.2.0/lsp.wit new file mode 100644 index 00000000000000..19e81b6b145fe2 --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/lsp.wit @@ -0,0 +1,83 @@ +interface lsp { + /// An LSP completion. + record completion { + label: string, + detail: option, + kind: option, + insert-text-format: option, + } + + /// The kind of an LSP completion. + variant completion-kind { + text, + method, + function, + %constructor, + field, + variable, + class, + %interface, + module, + property, + unit, + value, + %enum, + keyword, + snippet, + color, + file, + reference, + folder, + enum-member, + constant, + struct, + event, + operator, + type-parameter, + other(s32), + } + + /// Defines how to interpret the insert text in a completion item. + variant insert-text-format { + plain-text, + snippet, + other(s32), + } + + /// An LSP symbol. + record symbol { + kind: symbol-kind, + name: string, + } + + /// The kind of an LSP symbol. 
+ variant symbol-kind { + file, + module, + namespace, + %package, + class, + method, + property, + field, + %constructor, + %enum, + %interface, + function, + variable, + constant, + %string, + number, + boolean, + array, + object, + key, + null, + enum-member, + struct, + event, + operator, + type-parameter, + other(s32), + } +} diff --git a/crates/extension_api/wit/since_v0.2.0/nodejs.wit b/crates/extension_api/wit/since_v0.2.0/nodejs.wit new file mode 100644 index 00000000000000..c814548314162c --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/nodejs.wit @@ -0,0 +1,13 @@ +interface nodejs { + /// Returns the path to the Node binary used by Zed. + node-binary-path: func() -> result; + + /// Returns the latest version of the given NPM package. + npm-package-latest-version: func(package-name: string) -> result; + + /// Returns the installed version of the given NPM package, if it exists. + npm-package-installed-version: func(package-name: string) -> result, string>; + + /// Installs the specified NPM package. + npm-install-package: func(package-name: string, version: string) -> result<_, string>; +} diff --git a/crates/extension_api/wit/since_v0.2.0/platform.wit b/crates/extension_api/wit/since_v0.2.0/platform.wit new file mode 100644 index 00000000000000..48472a99bc175f --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/platform.wit @@ -0,0 +1,24 @@ +interface platform { + /// An operating system. + enum os { + /// macOS. + mac, + /// Linux. + linux, + /// Windows. + windows, + } + + /// A platform architecture. + enum architecture { + /// AArch64 (e.g., Apple Silicon). + aarch64, + /// x86. + x86, + /// x86-64. + x8664, + } + + /// Gets the current operating system and architecture. 
+ current-platform: func() -> tuple; +} diff --git a/crates/extension_api/wit/since_v0.2.0/settings.rs b/crates/extension_api/wit/since_v0.2.0/settings.rs new file mode 100644 index 00000000000000..5c6cae70649840 --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/settings.rs @@ -0,0 +1,29 @@ +use serde::{Deserialize, Serialize}; +use std::num::NonZeroU32; + +/// The settings for a particular language. +#[derive(Debug, Serialize, Deserialize)] +pub struct LanguageSettings { + /// How many columns a tab should occupy. + pub tab_size: NonZeroU32, +} + +/// The settings for a particular language server. +#[derive(Default, Debug, Serialize, Deserialize)] +pub struct LspSettings { + /// The settings for the language server binary. + pub binary: Option, + /// The initialization options to pass to the language server. + pub initialization_options: Option, + /// The settings to pass to language server. + pub settings: Option, +} + +/// The settings for a language server binary. +#[derive(Debug, Serialize, Deserialize)] +pub struct BinarySettings { + /// The path to the binary. + pub path: Option, + /// The arguments to pass to the binary. + pub arguments: Option>, +} diff --git a/crates/extension_api/wit/since_v0.2.0/slash-command.wit b/crates/extension_api/wit/since_v0.2.0/slash-command.wit new file mode 100644 index 00000000000000..f52561c2ef412b --- /dev/null +++ b/crates/extension_api/wit/since_v0.2.0/slash-command.wit @@ -0,0 +1,41 @@ +interface slash-command { + use common.{range}; + + /// A slash command for use in the Assistant. + record slash-command { + /// The name of the slash command. + name: string, + /// The description of the slash command. + description: string, + /// The tooltip text to display for the run button. + tooltip-text: string, + /// Whether this slash command requires an argument. + requires-argument: bool, + } + + /// The output of a slash command. + record slash-command-output { + /// The text produced by the slash command. 
+ text: string, + /// The list of sections to show in the slash command placeholder. + sections: list, + } + + /// A section in the slash command output. + record slash-command-output-section { + /// The range this section occupies. + range: range, + /// The label to display in the placeholder for this section. + label: string, + } + + /// A completion for a slash command argument. + record slash-command-argument-completion { + /// The label to display for this completion. + label: string, + /// The new text that should be inserted into the command when this completion is accepted. + new-text: string, + /// Whether the command should be run when accepting this completion. + run-command: bool, + } +} diff --git a/extensions/test-extension/Cargo.toml b/extensions/test-extension/Cargo.toml index 094302e89f47a1..5e17a9a6a3c549 100644 --- a/extensions/test-extension/Cargo.toml +++ b/extensions/test-extension/Cargo.toml @@ -13,4 +13,4 @@ path = "src/test_extension.rs" crate-type = ["cdylib"] [dependencies] -zed_extension_api = "0.1.0" +zed_extension_api = { path = "../../crates/extension_api" } From 91ffa02e2c7ee30b9a172ce5944ad96a747a453e Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 13 Sep 2024 13:17:49 -0400 Subject: [PATCH 066/762] /auto (#16696) Add `/auto` behind a feature flag that's disabled for now, even for staff. We've decided on a different design for context inference, but there are parts of /auto that will be useful for that, so we want them in the code base even if they're unused for now. 
Release Notes: - N/A --------- Co-authored-by: Antonio Scandurra Co-authored-by: Marshall Bowers --- Cargo.lock | 27 + Cargo.toml | 2 + crates/assistant/src/assistant.rs | 23 +- crates/assistant/src/assistant_panel.rs | 14 + crates/assistant/src/assistant_settings.rs | 1 + crates/assistant/src/slash_command.rs | 1 + .../src/slash_command/auto_command.rs | 360 ++++++ .../slash_command/prompt_after_summary.txt | 24 + .../slash_command/prompt_before_summary.txt | 31 + .../src/slash_command/search_command.rs | 9 +- crates/collab/k8s/collab.template.yml | 10 +- crates/collab/src/db/queries/projects.rs | 5 + crates/collab/src/db/queries/rooms.rs | 5 + crates/collab/src/lib.rs | 8 +- crates/collab/src/llm.rs | 4 +- crates/collab/src/llm/db/queries/providers.rs | 13 +- crates/collab/src/llm/db/seed.rs | 9 + crates/collab/src/tests/test_server.rs | 4 +- crates/feature_flags/Cargo.toml | 1 + crates/feature_flags/src/feature_flags.rs | 58 +- crates/fs/src/fs.rs | 20 +- crates/git/src/status.rs | 1 - crates/http_client/src/http_client.rs | 4 + .../language_model/src/model/cloud_model.rs | 4 +- .../language_model/src/provider/anthropic.rs | 2 +- crates/language_model/src/provider/google.rs | 4 +- crates/language_model/src/provider/open_ai.rs | 2 +- crates/language_model/src/registry.rs | 6 +- crates/project_panel/src/project_panel.rs | 1 + crates/proto/proto/zed.proto | 1 + crates/semantic_index/Cargo.toml | 4 + crates/semantic_index/examples/index.rs | 5 +- crates/semantic_index/src/embedding.rs | 12 +- crates/semantic_index/src/embedding_index.rs | 469 +++++++ crates/semantic_index/src/indexing.rs | 49 + crates/semantic_index/src/project_index.rs | 523 ++++++++ .../src/project_index_debug_view.rs | 16 +- crates/semantic_index/src/semantic_index.rs | 1129 ++--------------- crates/semantic_index/src/summary_backlog.rs | 48 + crates/semantic_index/src/summary_index.rs | 693 ++++++++++ crates/semantic_index/src/worktree_index.rs | 217 ++++ crates/worktree/src/worktree.rs | 5 + 42 
files changed, 2773 insertions(+), 1051 deletions(-) create mode 100644 crates/assistant/src/slash_command/auto_command.rs create mode 100644 crates/assistant/src/slash_command/prompt_after_summary.txt create mode 100644 crates/assistant/src/slash_command/prompt_before_summary.txt create mode 100644 crates/semantic_index/src/embedding_index.rs create mode 100644 crates/semantic_index/src/indexing.rs create mode 100644 crates/semantic_index/src/project_index.rs create mode 100644 crates/semantic_index/src/summary_backlog.rs create mode 100644 crates/semantic_index/src/summary_index.rs create mode 100644 crates/semantic_index/src/worktree_index.rs diff --git a/Cargo.lock b/Cargo.lock index 5eaf3ddde1fda5..793cb66ad77b07 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -304,6 +304,9 @@ name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +dependencies = [ + "serde", +] [[package]] name = "as-raw-xcb-connection" @@ -1709,6 +1712,19 @@ dependencies = [ "profiling", ] +[[package]] +name = "blake3" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", +] + [[package]] name = "block" version = "0.1.6" @@ -2752,6 +2768,12 @@ dependencies = [ "tiny-keccak", ] +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + [[package]] name = "context_servers" version = "0.1.0" @@ -4187,6 +4209,7 @@ dependencies = [ name = "feature_flags" version = "0.1.0" dependencies = [ + "futures 0.3.30", "gpui", ] @@ -9814,10 +9837,13 @@ name = "semantic_index" version = "0.1.0" dependencies = [ "anyhow", + "arrayvec", + "blake3", 
"client", "clock", "collections", "env_logger", + "feature_flags", "fs", "futures 0.3.30", "futures-batch", @@ -9825,6 +9851,7 @@ dependencies = [ "heed", "http_client", "language", + "language_model", "languages", "log", "open_ai", diff --git a/Cargo.toml b/Cargo.toml index 79f5ce2dcf54d2..53109002fa1139 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -309,6 +309,7 @@ aho-corasick = "1.1" alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "91d034ff8b53867143c005acfaa14609147c9a2c" } any_vec = "0.14" anyhow = "1.0.86" +arrayvec = { version = "0.7.4", features = ["serde"] } ashpd = "0.9.1" async-compression = { version = "0.4", features = ["gzip", "futures-io"] } async-dispatcher = "0.1" @@ -325,6 +326,7 @@ bitflags = "2.6.0" blade-graphics = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" } blade-macros = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" } blade-util = { git = "https://github.com/kvark/blade", rev = "e142a3a5e678eb6a13e642ad8401b1f3aa38e969" } +blake3 = "1.5.3" cargo_metadata = "0.18" cargo_toml = "0.20" chrono = { version = "0.4", features = ["serde"] } diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index 70e37ba239eed6..7a73c188ec9dc1 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -37,13 +37,13 @@ use language_model::{ pub(crate) use model_selector::*; pub use prompts::PromptBuilder; use prompts::PromptLoadingParams; -use semantic_index::{CloudEmbeddingProvider, SemanticIndex}; +use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; use slash_command::{ - context_server_command, default_command, diagnostics_command, docs_command, fetch_command, - file_command, now_command, project_command, prompt_command, search_command, symbols_command, - tab_command, 
terminal_command, workflow_command, + auto_command, context_server_command, default_command, diagnostics_command, docs_command, + fetch_command, file_command, now_command, project_command, prompt_command, search_command, + symbols_command, tab_command, terminal_command, workflow_command, }; use std::path::PathBuf; use std::sync::Arc; @@ -210,12 +210,13 @@ pub fn init( let client = client.clone(); async move { let embedding_provider = CloudEmbeddingProvider::new(client.clone()); - let semantic_index = SemanticIndex::new( + let semantic_index = SemanticDb::new( paths::embeddings_dir().join("semantic-index-db.0.mdb"), Arc::new(embedding_provider), &mut cx, ) .await?; + cx.update(|cx| cx.set_global(semantic_index)) } }) @@ -364,6 +365,7 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) { fn register_slash_commands(prompt_builder: Option>, cx: &mut AppContext) { let slash_command_registry = SlashCommandRegistry::global(cx); + slash_command_registry.register_command(file_command::FileSlashCommand, true); slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true); slash_command_registry.register_command(tab_command::TabSlashCommand, true); @@ -382,6 +384,17 @@ fn register_slash_commands(prompt_builder: Option>, cx: &mut } slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); + cx.observe_flag::({ + let slash_command_registry = slash_command_registry.clone(); + move |is_enabled, _cx| { + if is_enabled { + // [#auto-staff-ship] TODO remove this when /auto is no longer staff-shipped + slash_command_registry.register_command(auto_command::AutoCommand, true); + } + } + }) + .detach(); + update_slash_commands_from_settings(cx); cx.observe_global::(update_slash_commands_from_settings) .detach(); diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 634f2231cd4f51..51c9aa9b4ea901 100644 --- a/crates/assistant/src/assistant_panel.rs +++ 
b/crates/assistant/src/assistant_panel.rs @@ -4723,6 +4723,20 @@ impl Render for ContextEditorToolbarItem { let weak_self = cx.view().downgrade(); let right_side = h_flex() .gap_2() + // TODO display this in a nicer way, once we have a design for it. + // .children({ + // let project = self + // .workspace + // .upgrade() + // .map(|workspace| workspace.read(cx).project().downgrade()); + // + // let scan_items_remaining = cx.update_global(|db: &mut SemanticDb, cx| { + // project.and_then(|project| db.remaining_summaries(&project, cx)) + // }); + + // scan_items_remaining + // .map(|remaining_items| format!("Files to scan: {}", remaining_items)) + // }) .child( ModelSelector::new( self.fs.clone(), diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index 3e326886d50d71..7939eacd9344a3 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -519,6 +519,7 @@ impl Settings for AssistantSettings { &mut settings.default_model, value.default_model.map(Into::into), ); + // merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference } Ok(settings) diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index b1a97688b2b46a..387e8231e47842 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -19,6 +19,7 @@ use std::{ use ui::ActiveTheme; use workspace::Workspace; +pub mod auto_command; pub mod context_server_command; pub mod default_command; pub mod diagnostics_command; diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs new file mode 100644 index 00000000000000..cedfc63702b9aa --- /dev/null +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -0,0 +1,360 @@ +use super::create_label_for_command; +use super::{SlashCommand, SlashCommandOutput}; +use anyhow::{anyhow, Result}; +use 
assistant_slash_command::ArgumentCompletion; +use feature_flags::FeatureFlag; +use futures::StreamExt; +use gpui::{AppContext, AsyncAppContext, Task, WeakView}; +use language::{CodeLabel, LspAdapterDelegate}; +use language_model::{ + LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, Role, +}; +use semantic_index::{FileSummary, SemanticDb}; +use smol::channel; +use std::sync::{atomic::AtomicBool, Arc}; +use ui::{BorrowAppContext, WindowContext}; +use util::ResultExt; +use workspace::Workspace; + +pub struct AutoSlashCommandFeatureFlag; + +impl FeatureFlag for AutoSlashCommandFeatureFlag { + const NAME: &'static str = "auto-slash-command"; +} + +pub(crate) struct AutoCommand; + +impl SlashCommand for AutoCommand { + fn name(&self) -> String { + "auto".into() + } + + fn description(&self) -> String { + "Automatically infer what context to add, based on your prompt".into() + } + + fn menu_text(&self) -> String { + "Automatically Infer Context".into() + } + + fn label(&self, cx: &AppContext) -> CodeLabel { + create_label_for_command("auto", &["--prompt"], cx) + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancel: Arc, + workspace: Option>, + cx: &mut WindowContext, + ) -> Task>> { + // There's no autocomplete for a prompt, since it's arbitrary text. + // However, we can use this opportunity to kick off a drain of the backlog. + // That way, it can hopefully be done resummarizing by the time we've actually + // typed out our prompt. This re-runs on every keystroke during autocomplete, + // but in the future, we could instead do it only once, when /auto is first entered. 
+ let Some(workspace) = workspace.and_then(|ws| ws.upgrade()) else { + log::warn!("workspace was dropped or unavailable during /auto autocomplete"); + + return Task::ready(Ok(Vec::new())); + }; + + let project = workspace.read(cx).project().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow!("No project indexer, cannot use /auto"))); + }; + + let cx: &mut AppContext = cx; + + cx.spawn(|cx: gpui::AsyncAppContext| async move { + let task = project_index.read_with(&cx, |project_index, cx| { + project_index.flush_summary_backlogs(cx) + })?; + + cx.background_executor().spawn(task).await; + + anyhow::Ok(Vec::new()) + }) + } + + fn requires_argument(&self) -> bool { + true + } + + fn run( + self: Arc, + arguments: &[String], + workspace: WeakView, + _delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let Some(workspace) = workspace.upgrade() else { + return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); + }; + if arguments.is_empty() { + return Task::ready(Err(anyhow!("missing prompt"))); + }; + let argument = arguments.join(" "); + let original_prompt = argument.to_string(); + let project = workspace.read(cx).project().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow!("no project indexer"))); + }; + + let task = cx.spawn(|cx: gpui::AsyncWindowContext| async move { + let summaries = project_index + .read_with(&cx, |project_index, cx| project_index.all_summaries(cx))? + .await?; + + commands_for_summaries(&summaries, &original_prompt, &cx).await + }); + + // As a convenience, append /auto's argument to the end of the prompt + // so you don't have to write it again. 
+ let original_prompt = argument.to_string(); + + cx.background_executor().spawn(async move { + let commands = task.await?; + let mut prompt = String::new(); + + log::info!( + "Translating this response into slash-commands: {:?}", + commands + ); + + for command in commands { + prompt.push('/'); + prompt.push_str(&command.name); + prompt.push(' '); + prompt.push_str(&command.arg); + prompt.push('\n'); + } + + prompt.push('\n'); + prompt.push_str(&original_prompt); + + Ok(SlashCommandOutput { + text: prompt, + sections: Vec::new(), + run_commands_in_text: true, + }) + }) + } +} + +const PROMPT_INSTRUCTIONS_BEFORE_SUMMARY: &str = include_str!("prompt_before_summary.txt"); +const PROMPT_INSTRUCTIONS_AFTER_SUMMARY: &str = include_str!("prompt_after_summary.txt"); + +fn summaries_prompt(summaries: &[FileSummary], original_prompt: &str) -> String { + let json_summaries = serde_json::to_string(summaries).unwrap(); + + format!("{PROMPT_INSTRUCTIONS_BEFORE_SUMMARY}\n{json_summaries}\n{PROMPT_INSTRUCTIONS_AFTER_SUMMARY}\n{original_prompt}") +} + +/// The slash commands that the model is told about, and which we look for in the inference response. +const SUPPORTED_SLASH_COMMANDS: &[&str] = &["search", "file"]; + +#[derive(Debug, Clone)] +struct CommandToRun { + name: String, + arg: String, +} + +/// Given the pre-indexed file summaries for this project, as well as the original prompt +/// string passed to `/auto`, get a list of slash commands to run, along with their arguments. +/// +/// The prompt's output does not include the slashes (to reduce the chance that it makes a mistake), +/// so taking one of these returned Strings and turning it into a real slash-command-with-argument +/// involves prepending a slash to it. +/// +/// This function will validate that each of the returned lines begins with one of SUPPORTED_SLASH_COMMANDS. +/// Any other lines it encounters will be discarded, with a warning logged. 
+async fn commands_for_summaries( + summaries: &[FileSummary], + original_prompt: &str, + cx: &AsyncAppContext, +) -> Result> { + if summaries.is_empty() { + log::warn!("Inferring no context because there were no summaries available."); + return Ok(Vec::new()); + } + + // Use the globally configured model to translate the summaries into slash-commands, + // because Qwen2-7B-Instruct has not done a good job at that task. + let Some(model) = cx.update(|cx| LanguageModelRegistry::read_global(cx).active_model())? else { + log::warn!("Can't infer context because there's no active model."); + return Ok(Vec::new()); + }; + // Only go up to 90% of the actual max token count, to reduce chances of + // exceeding the token count due to inaccuracies in the token counting heuristic. + let max_token_count = (model.max_token_count() * 9) / 10; + + // Rather than recursing (which would require this async function use a pinned box), + // we use an explicit stack of arguments and answers for when we need to "recurse." + let mut stack = vec![summaries]; + let mut final_response = Vec::new(); + let mut prompts = Vec::new(); + + // TODO We only need to create multiple Requests because we currently + // don't have the ability to tell if a CompletionProvider::complete response + // was a "too many tokens in this request" error. If we had that, then + // we could try the request once, instead of having to make separate requests + // to check the token count and then afterwards to run the actual prompt. + let make_request = |prompt: String| LanguageModelRequest { + messages: vec![LanguageModelRequestMessage { + role: Role::User, + content: vec![prompt.into()], + // Nothing in here will benefit from caching + cache: false, + }], + tools: Vec::new(), + stop: Vec::new(), + temperature: 1.0, + }; + + while let Some(current_summaries) = stack.pop() { + // The split can result in one slice being empty and the other having one element. + // Whenever that happens, skip the empty one. 
+ if current_summaries.is_empty() { + continue; + } + + log::info!( + "Inferring prompt context using {} file summaries", + current_summaries.len() + ); + + let prompt = summaries_prompt(¤t_summaries, original_prompt); + let start = std::time::Instant::now(); + // Per OpenAI, 1 token ~= 4 chars in English (we go with 4.5 to overestimate a bit, because failed API requests cost a lot of perf) + // Verifying this against an actual model.count_tokens() confirms that it's usually within ~5% of the correct answer, whereas + // getting the correct answer from tiktoken takes hundreds of milliseconds (compared to this arithmetic being ~free). + // source: https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them + let token_estimate = prompt.len() * 2 / 9; + let duration = start.elapsed(); + log::info!( + "Time taken to count tokens for prompt of length {:?}B: {:?}", + prompt.len(), + duration + ); + + if token_estimate < max_token_count { + prompts.push(prompt); + } else if current_summaries.len() == 1 { + log::warn!("Inferring context for a single file's summary failed because the prompt's token length exceeded the model's token limit."); + } else { + log::info!( + "Context inference using file summaries resulted in a prompt containing {token_estimate} tokens, which exceeded the model's max of {max_token_count}. 
Retrying as two separate prompts, each including half the number of summaries.", + ); + let (left, right) = current_summaries.split_at(current_summaries.len() / 2); + stack.push(right); + stack.push(left); + } + } + + let all_start = std::time::Instant::now(); + + let (tx, rx) = channel::bounded(1024); + + let completion_streams = prompts + .into_iter() + .map(|prompt| { + let request = make_request(prompt.clone()); + let model = model.clone(); + let tx = tx.clone(); + let stream = model.stream_completion(request, &cx); + + (stream, tx) + }) + .collect::>(); + + cx.background_executor() + .spawn(async move { + let futures = completion_streams + .into_iter() + .enumerate() + .map(|(ix, (stream, tx))| async move { + let start = std::time::Instant::now(); + let events = stream.await?; + log::info!("Time taken for awaiting /await chunk stream #{ix}: {:?}", start.elapsed()); + + let completion: String = events + .filter_map(|event| async { + if let Ok(LanguageModelCompletionEvent::Text(text)) = event { + Some(text) + } else { + None + } + }) + .collect() + .await; + + log::info!("Time taken for all /auto chunks to come back for #{ix}: {:?}", start.elapsed()); + + for line in completion.split('\n') { + if let Some(first_space) = line.find(' ') { + let command = &line[..first_space].trim(); + let arg = &line[first_space..].trim(); + + tx.send(CommandToRun { + name: command.to_string(), + arg: arg.to_string(), + }) + .await?; + } else if !line.trim().is_empty() { + // All slash-commands currently supported in context inference need a space for the argument. 
+ log::warn!( + "Context inference returned a non-blank line that contained no spaces (meaning no argument for the slash command): {:?}", + line + ); + } + } + + anyhow::Ok(()) + }) + .collect::>(); + + let _ = futures::future::try_join_all(futures).await.log_err(); + + let duration = all_start.elapsed(); + eprintln!("All futures completed in {:?}", duration); + }) + .await; + + drop(tx); // Close the channel so that rx.collect() won't hang. This is safe because all futures have completed. + let results = rx.collect::>().await; + eprintln!( + "Finished collecting from the channel with {} results", + results.len() + ); + for command in results { + // Don't return empty or duplicate commands + if !command.name.is_empty() + && !final_response + .iter() + .any(|cmd: &CommandToRun| cmd.name == command.name && cmd.arg == command.arg) + { + if SUPPORTED_SLASH_COMMANDS + .iter() + .any(|supported| &command.name == supported) + { + final_response.push(command); + } else { + log::warn!( + "Context inference returned an unrecognized slash command: {:?}", + command + ); + } + } + } + + // Sort the commands by name (reversed just so that /search appears before /file) + final_response.sort_by(|cmd1, cmd2| cmd1.name.cmp(&cmd2.name).reverse()); + + Ok(final_response) +} diff --git a/crates/assistant/src/slash_command/prompt_after_summary.txt b/crates/assistant/src/slash_command/prompt_after_summary.txt new file mode 100644 index 00000000000000..fc139a1fcb0c15 --- /dev/null +++ b/crates/assistant/src/slash_command/prompt_after_summary.txt @@ -0,0 +1,24 @@ +Actions have a cost, so only include actions that you think +will be helpful to you in doing a great job answering the +prompt in the future. + +You must respond ONLY with a list of actions you would like to +perform. Each action should be on its own line, and followed by a space and then its parameter. + +Actions can be performed more than once with different parameters. 
+Here is an example valid response: + +``` +file path/to/my/file.txt +file path/to/another/file.txt +search something to search for +search something else to search for +``` + +Once again, do not forget: you must respond ONLY in the format of +one action per line, and the action name should be followed by +its parameter. Your response must not include anything other +than a list of actions, with one action per line, in this format. +It is extremely important that you do not deviate from this format even slightly! + +This is the end of my instructions for how to respond. The rest is the prompt: diff --git a/crates/assistant/src/slash_command/prompt_before_summary.txt b/crates/assistant/src/slash_command/prompt_before_summary.txt new file mode 100644 index 00000000000000..5d8db1b8f7903f --- /dev/null +++ b/crates/assistant/src/slash_command/prompt_before_summary.txt @@ -0,0 +1,31 @@ +I'm going to give you a prompt. I don't want you to respond +to the prompt itself. I want you to figure out which of the following +actions on my project, if any, would help you answer the prompt. + +Here are the actions: + +## file + +This action's parameter is a file path to one of the files +in the project. If you ask for this action, I will tell you +the full contents of the file, so you can learn all the +details of the file. + +## search + +This action's parameter is a string to do a semantic search for +across the files in the project. (You will have a JSON summary +of all the files in the project.) It will tell you which files this string +(or similar strings; it is a semantic search) appear in, +as well as some context of the lines surrounding each result. +It's very important that you only use this action when you think +that searching across the specific files in this project for the query +in question will be useful. 
For example, don't use this command to search +for queries you might put into a general Web search engine, because those +will be too general to give useful results in this project-specific search. + +--- + +That was the end of the list of actions. + +Here is a JSON summary of each of the files in my project: diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index 4da8a5585f3824..3a513ed9ad3784 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -8,7 +8,7 @@ use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use feature_flags::FeatureFlag; use gpui::{AppContext, Task, WeakView}; use language::{CodeLabel, LineEnding, LspAdapterDelegate}; -use semantic_index::SemanticIndex; +use semantic_index::SemanticDb; use std::{ fmt::Write, path::PathBuf, @@ -92,8 +92,11 @@ impl SlashCommand for SearchSlashCommand { let project = workspace.read(cx).project().clone(); let fs = project.read(cx).fs().clone(); - let project_index = - cx.update_global(|index: &mut SemanticIndex, cx| index.project_index(project, cx)); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow::anyhow!("no project indexer"))); + }; cx.spawn(|cx| async move { let results = project_index diff --git a/crates/collab/k8s/collab.template.yml b/crates/collab/k8s/collab.template.yml index dcd935166a7de7..f5e454c3fcf04d 100644 --- a/crates/collab/k8s/collab.template.yml +++ b/crates/collab/k8s/collab.template.yml @@ -149,16 +149,16 @@ spec: secretKeyRef: name: google-ai key: api_key - - name: QWEN2_7B_API_KEY + - name: RUNPOD_API_KEY valueFrom: secretKeyRef: - name: hugging-face + name: runpod key: api_key - - name: QWEN2_7B_API_URL + - name: RUNPOD_API_SUMMARY_URL valueFrom: secretKeyRef: - name: hugging-face - key: qwen2_api_url + name: runpod + 
key: summary - name: BLOB_STORE_ACCESS_KEY valueFrom: secretKeyRef: diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index a6956c84966ea3..c6db54b5729f4f 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -728,6 +728,11 @@ impl Database { is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, git_status: db_entry.git_status.map(|status| status as i32), + // This is only used in the summarization backlog, so if it's None, + // that just means we won't be able to detect when to resummarize + // based on total number of backlogged bytes - instead, we'd go + // on number of files only. That shouldn't be a huge deal in practice. + size: None, is_fifo: db_entry.is_fifo, }); } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 1669ddbb3b7477..635e2d232f087f 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -663,6 +663,11 @@ impl Database { is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, git_status: db_entry.git_status.map(|status| status as i32), + // This is only used in the summarization backlog, so if it's None, + // that just means we won't be able to detect when to resummarize + // based on total number of backlogged bytes - instead, we'd go + // on number of files only. That shouldn't be a huge deal in practice. 
+ size: None, is_fifo: db_entry.is_fifo, }); } diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs index 461adc3575badf..81ff3ff21f6926 100644 --- a/crates/collab/src/lib.rs +++ b/crates/collab/src/lib.rs @@ -170,8 +170,8 @@ pub struct Config { pub anthropic_api_key: Option>, pub anthropic_staff_api_key: Option>, pub llm_closed_beta_model_name: Option>, - pub qwen2_7b_api_key: Option>, - pub qwen2_7b_api_url: Option>, + pub runpod_api_key: Option>, + pub runpod_api_summary_url: Option>, pub zed_client_checksum_seed: Option, pub slack_panics_webhook: Option, pub auto_join_channel_id: Option, @@ -235,8 +235,8 @@ impl Config { stripe_api_key: None, stripe_price_id: None, supermaven_admin_api_key: None, - qwen2_7b_api_key: None, - qwen2_7b_api_url: None, + runpod_api_key: None, + runpod_api_summary_url: None, user_backfiller_github_access_token: None, } } diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index e1a345436810ea..def4499ae41bac 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -402,12 +402,12 @@ async fn perform_completion( LanguageModelProvider::Zed => { let api_key = state .config - .qwen2_7b_api_key + .runpod_api_key .as_ref() .context("no Qwen2-7B API key configured on the server")?; let api_url = state .config - .qwen2_7b_api_url + .runpod_api_summary_url .as_ref() .context("no Qwen2-7B URL configured on the server")?; let chunks = open_ai::stream_completion( diff --git a/crates/collab/src/llm/db/queries/providers.rs b/crates/collab/src/llm/db/queries/providers.rs index 8a73b399c67ade..7e51061ceef2ee 100644 --- a/crates/collab/src/llm/db/queries/providers.rs +++ b/crates/collab/src/llm/db/queries/providers.rs @@ -1,5 +1,5 @@ use super::*; -use sea_orm::QueryOrder; +use sea_orm::{sea_query::OnConflict, QueryOrder}; use std::str::FromStr; use strum::IntoEnumIterator as _; @@ -99,6 +99,17 @@ impl LlmDatabase { ..Default::default() } })) + .on_conflict( + OnConflict::columns([model::Column::ProviderId, 
model::Column::Name]) + .update_columns([ + model::Column::MaxRequestsPerMinute, + model::Column::MaxTokensPerMinute, + model::Column::MaxTokensPerDay, + model::Column::PricePerMillionInputTokens, + model::Column::PricePerMillionOutputTokens, + ]) + .to_owned(), + ) .exec_without_returning(&*tx) .await?; Ok(()) diff --git a/crates/collab/src/llm/db/seed.rs b/crates/collab/src/llm/db/seed.rs index 55c6c30cd5d8bf..24bc224227c8d2 100644 --- a/crates/collab/src/llm/db/seed.rs +++ b/crates/collab/src/llm/db/seed.rs @@ -40,6 +40,15 @@ pub async fn seed_database(_config: &Config, db: &mut LlmDatabase, _force: bool) price_per_million_input_tokens: 25, // $0.25/MTok price_per_million_output_tokens: 125, // $1.25/MTok }, + ModelParams { + provider: LanguageModelProvider::Zed, + name: "Qwen/Qwen2-7B-Instruct".into(), + max_requests_per_minute: 5, + max_tokens_per_minute: 25_000, // These are arbitrary limits we've set to cap costs; we control this number + max_tokens_per_day: 300_000, + price_per_million_input_tokens: 25, + price_per_million_output_tokens: 125, + }, ]) .await } diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index e691afceda7e08..1421e4c7f7aed7 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -679,8 +679,8 @@ impl TestServer { stripe_api_key: None, stripe_price_id: None, supermaven_admin_api_key: None, - qwen2_7b_api_key: None, - qwen2_7b_api_url: None, + runpod_api_key: None, + runpod_api_summary_url: None, user_backfiller_github_access_token: None, }, }) diff --git a/crates/feature_flags/Cargo.toml b/crates/feature_flags/Cargo.toml index 101e90c6460c98..834e315af37d97 100644 --- a/crates/feature_flags/Cargo.toml +++ b/crates/feature_flags/Cargo.toml @@ -13,3 +13,4 @@ path = "src/feature_flags.rs" [dependencies] gpui.workspace = true +futures.workspace = true diff --git a/crates/feature_flags/src/feature_flags.rs b/crates/feature_flags/src/feature_flags.rs index 
29768138afe1e1..fb4e192023d914 100644 --- a/crates/feature_flags/src/feature_flags.rs +++ b/crates/feature_flags/src/feature_flags.rs @@ -1,4 +1,10 @@ +use futures::{channel::oneshot, FutureExt as _}; use gpui::{AppContext, Global, Subscription, ViewContext}; +use std::{ + future::Future, + pin::Pin, + task::{Context, Poll}, +}; #[derive(Default)] struct FeatureFlags { @@ -53,6 +59,15 @@ impl FeatureFlag for ZedPro { const NAME: &'static str = "zed-pro"; } +pub struct AutoCommand {} +impl FeatureFlag for AutoCommand { + const NAME: &'static str = "auto-command"; + + fn enabled_for_staff() -> bool { + false + } +} + pub trait FeatureFlagViewExt { fn observe_flag(&mut self, callback: F) -> Subscription where @@ -75,6 +90,7 @@ where } pub trait FeatureFlagAppExt { + fn wait_for_flag(&mut self) -> WaitForFlag; fn update_flags(&mut self, staff: bool, flags: Vec); fn set_staff(&mut self, staff: bool); fn has_flag(&self) -> bool; @@ -82,7 +98,7 @@ pub trait FeatureFlagAppExt { fn observe_flag(&mut self, callback: F) -> Subscription where - F: Fn(bool, &mut AppContext) + 'static; + F: FnMut(bool, &mut AppContext) + 'static; } impl FeatureFlagAppExt for AppContext { @@ -109,13 +125,49 @@ impl FeatureFlagAppExt for AppContext { .unwrap_or(false) } - fn observe_flag(&mut self, callback: F) -> Subscription + fn observe_flag(&mut self, mut callback: F) -> Subscription where - F: Fn(bool, &mut AppContext) + 'static, + F: FnMut(bool, &mut AppContext) + 'static, { self.observe_global::(move |cx| { let feature_flags = cx.global::(); callback(feature_flags.has_flag::(), cx); }) } + + fn wait_for_flag(&mut self) -> WaitForFlag { + let (tx, rx) = oneshot::channel::(); + let mut tx = Some(tx); + let subscription: Option; + + match self.try_global::() { + Some(feature_flags) => { + subscription = None; + tx.take().unwrap().send(feature_flags.has_flag::()).ok(); + } + None => { + subscription = Some(self.observe_global::(move |cx| { + let feature_flags = cx.global::(); + if let Some(tx) 
= tx.take() { + tx.send(feature_flags.has_flag::()).ok(); + } + })); + } + } + + WaitForFlag(rx, subscription) + } +} + +pub struct WaitForFlag(oneshot::Receiver, Option); + +impl Future for WaitForFlag { + type Output = bool; + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + self.0.poll_unpin(cx).map(|result| { + self.1.take(); + result.unwrap_or(false) + }) + } } diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 0ec5a4c601371b..b649831fd2de69 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -171,6 +171,7 @@ pub struct Metadata { pub mtime: SystemTime, pub is_symlink: bool, pub is_dir: bool, + pub len: u64, pub is_fifo: bool, } @@ -497,6 +498,7 @@ impl Fs for RealFs { Ok(Some(Metadata { inode, mtime: metadata.modified().unwrap(), + len: metadata.len(), is_symlink, is_dir: metadata.file_type().is_dir(), is_fifo, @@ -800,11 +802,13 @@ enum FakeFsEntry { File { inode: u64, mtime: SystemTime, + len: u64, content: Vec, }, Dir { inode: u64, mtime: SystemTime, + len: u64, entries: BTreeMap>>, git_repo_state: Option>>, }, @@ -935,6 +939,7 @@ impl FakeFs { root: Arc::new(Mutex::new(FakeFsEntry::Dir { inode: 0, mtime: SystemTime::UNIX_EPOCH, + len: 0, entries: Default::default(), git_repo_state: None, })), @@ -969,6 +974,7 @@ impl FakeFs { inode: new_inode, mtime: new_mtime, content: Vec::new(), + len: 0, }))); } btree_map::Entry::Occupied(mut e) => match &mut *e.get_mut().lock() { @@ -1016,6 +1022,7 @@ impl FakeFs { let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, + len: content.len() as u64, content, })); let mut kind = None; @@ -1369,6 +1376,7 @@ impl Fs for FakeFs { Arc::new(Mutex::new(FakeFsEntry::Dir { inode, mtime, + len: 0, entries: Default::default(), git_repo_state: None, })) @@ -1391,6 +1399,7 @@ impl Fs for FakeFs { let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, + len: 0, content: Vec::new(), })); let mut kind = Some(PathEventKind::Created); @@ -1539,6 +1548,7 @@ impl Fs for FakeFs { 
e.insert(Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, + len: content.len() as u64, content: Vec::new(), }))) .clone(), @@ -1694,16 +1704,22 @@ impl Fs for FakeFs { let entry = entry.lock(); Ok(Some(match &*entry { - FakeFsEntry::File { inode, mtime, .. } => Metadata { + FakeFsEntry::File { + inode, mtime, len, .. + } => Metadata { inode: *inode, mtime: *mtime, + len: *len, is_dir: false, is_symlink, is_fifo: false, }, - FakeFsEntry::Dir { inode, mtime, .. } => Metadata { + FakeFsEntry::Dir { + inode, mtime, len, .. + } => Metadata { inode: *inode, mtime: *mtime, + len: *len, is_dir: true, is_symlink, is_fifo: false, diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index e6098ffd3c47bf..6eb98ecefe70d2 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -57,7 +57,6 @@ impl GitStatus { let stderr = String::from_utf8_lossy(&output.stderr); return Err(anyhow!("git status process failed: {}", stderr)); } - let stdout = String::from_utf8_lossy(&output.stdout); let mut entries = stdout .split('\0') diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 452be0a2430dad..1841a1f394627d 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -221,6 +221,10 @@ impl HttpClient for HttpClientWithUrl { pub fn client(user_agent: Option, proxy: Option) -> Arc { let mut builder = isahc::HttpClient::builder() + // Some requests to Qwen2 models on Runpod can take 32+ seconds, + // especially if there's a cold boot involved. We may need to have + // those requests use a different http client, because global timeouts + // of 50 and 60 seconds, respectively, would be very high! 
.connect_timeout(Duration::from_secs(5)) .low_speed_timeout(100, Duration::from_secs(5)) .proxy(proxy.clone()); diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index f36b6b2788ae05..be0812eab90e70 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -17,14 +17,14 @@ pub enum CloudModel { #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, EnumIter)] pub enum ZedModel { - #[serde(rename = "qwen2-7b-instruct")] + #[serde(rename = "Qwen/Qwen2-7B-Instruct")] Qwen2_7bInstruct, } impl ZedModel { pub fn id(&self) -> &str { match self { - ZedModel::Qwen2_7bInstruct => "qwen2-7b-instruct", + ZedModel::Qwen2_7bInstruct => "Qwen/Qwen2-7B-Instruct", } } diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index eac4ad3021ee1d..9f7135aef7d73f 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -319,7 +319,7 @@ impl AnthropicModel { }; async move { - let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let api_key = api_key.ok_or_else(|| anyhow!("Missing Anthropic API Key"))?; let request = anthropic::stream_completion( http_client.as_ref(), &api_url, diff --git a/crates/language_model/src/provider/google.rs b/crates/language_model/src/provider/google.rs index fc4a7a7a3405a0..005f35ff8b1bfe 100644 --- a/crates/language_model/src/provider/google.rs +++ b/crates/language_model/src/provider/google.rs @@ -265,7 +265,7 @@ impl LanguageModel for GoogleLanguageModel { let low_speed_timeout = settings.low_speed_timeout; async move { - let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let api_key = api_key.ok_or_else(|| anyhow!("Missing Google API key"))?; let response = google_ai::count_tokens( http_client.as_ref(), &api_url, @@ -304,7 +304,7 @@ impl LanguageModel for GoogleLanguageModel { }; 
let future = self.rate_limiter.stream(async move { - let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let api_key = api_key.ok_or_else(|| anyhow!("Missing Google API Key"))?; let response = stream_generate_content( http_client.as_ref(), &api_url, diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 3a371499eb44db..fe5e60caec8a95 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -239,7 +239,7 @@ impl OpenAiLanguageModel { }; let future = self.request_limiter.stream(async move { - let api_key = api_key.ok_or_else(|| anyhow!("missing api key"))?; + let api_key = api_key.ok_or_else(|| anyhow!("Missing OpenAI API Key"))?; let request = stream_completion( http_client.as_ref(), &api_url, diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index 589dfe776a8d8c..b3c8ef5f57cc6b 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -159,11 +159,13 @@ impl LanguageModelRegistry { providers } - pub fn available_models(&self, cx: &AppContext) -> Vec> { + pub fn available_models<'a>( + &'a self, + cx: &'a AppContext, + ) -> impl Iterator> + 'a { self.providers .values() .flat_map(|provider| provider.provided_models(cx)) - .collect() } pub fn provider(&self, id: &LanguageModelProviderId) -> Option> { diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c77a2170dd01cd..c8e1ce28eb5938 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1823,6 +1823,7 @@ impl ProjectPanel { path: entry.path.join("\0").into(), inode: 0, mtime: entry.mtime, + size: entry.size, is_ignored: entry.is_ignored, is_external: false, is_private: false, diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index e5d767fffb3fae..f59e8146b6f18c 100644 --- 
a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -1855,6 +1855,7 @@ message Entry { bool is_external = 8; optional GitStatus git_status = 9; bool is_fifo = 10; + optional uint64 size = 11; } message RepositoryEntry { diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index 4fd3a86b29f702..c8dbb6a9f53dd2 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -19,14 +19,18 @@ crate-type = ["bin"] [dependencies] anyhow.workspace = true +arrayvec.workspace = true +blake3.workspace = true client.workspace = true clock.workspace = true collections.workspace = true +feature_flags.workspace = true fs.workspace = true futures.workspace = true futures-batch.workspace = true gpui.workspace = true language.workspace = true +language_model.workspace = true log.workspace = true heed.workspace = true http_client.workspace = true diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index e536ea1db606d1..977473d1dc38ba 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -4,7 +4,7 @@ use gpui::App; use http_client::HttpClientWithUrl; use language::language_settings::AllLanguageSettings; use project::Project; -use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticIndex}; +use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb}; use settings::SettingsStore; use std::{ path::{Path, PathBuf}, @@ -50,7 +50,7 @@ fn main() { )); cx.spawn(|mut cx| async move { - let semantic_index = SemanticIndex::new( + let semantic_index = SemanticDb::new( PathBuf::from("/tmp/semantic-index-db.mdb"), embedding_provider, &mut cx, @@ -71,6 +71,7 @@ fn main() { let project_index = cx .update(|cx| semantic_index.project_index(project.clone(), cx)) + .unwrap() .unwrap(); let (tx, rx) = oneshot::channel(); diff --git a/crates/semantic_index/src/embedding.rs 
b/crates/semantic_index/src/embedding.rs index b5195c891177d4..b05c4ac9da0ef2 100644 --- a/crates/semantic_index/src/embedding.rs +++ b/crates/semantic_index/src/embedding.rs @@ -12,6 +12,12 @@ use futures::{future::BoxFuture, FutureExt}; use serde::{Deserialize, Serialize}; use std::{fmt, future}; +/// Trait for embedding providers. Texts in, vectors out. +pub trait EmbeddingProvider: Sync + Send { + fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result>>; + fn batch_size(&self) -> usize; +} + #[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] pub struct Embedding(Vec); @@ -68,12 +74,6 @@ impl fmt::Display for Embedding { } } -/// Trait for embedding providers. Texts in, vectors out. -pub trait EmbeddingProvider: Sync + Send { - fn embed<'a>(&'a self, texts: &'a [TextToEmbed<'a>]) -> BoxFuture<'a, Result>>; - fn batch_size(&self) -> usize; -} - #[derive(Debug)] pub struct TextToEmbed<'a> { pub text: &'a str, diff --git a/crates/semantic_index/src/embedding_index.rs b/crates/semantic_index/src/embedding_index.rs new file mode 100644 index 00000000000000..dd7c58dc11a760 --- /dev/null +++ b/crates/semantic_index/src/embedding_index.rs @@ -0,0 +1,469 @@ +use crate::{ + chunking::{self, Chunk}, + embedding::{Embedding, EmbeddingProvider, TextToEmbed}, + indexing::{IndexingEntryHandle, IndexingEntrySet}, +}; +use anyhow::{anyhow, Context as _, Result}; +use collections::Bound; +use fs::Fs; +use futures::stream::StreamExt; +use futures_batch::ChunksTimeoutStreamExt; +use gpui::{AppContext, Model, Task}; +use heed::types::{SerdeBincode, Str}; +use language::LanguageRegistry; +use log; +use project::{Entry, UpdatedEntriesSet, Worktree}; +use serde::{Deserialize, Serialize}; +use smol::channel; +use std::{ + cmp::Ordering, + future::Future, + iter, + path::Path, + sync::Arc, + time::{Duration, SystemTime}, +}; +use util::ResultExt; +use worktree::Snapshot; + +pub struct EmbeddingIndex { + worktree: Model, + db_connection: heed::Env, + 
db: heed::Database>, + fs: Arc, + language_registry: Arc, + embedding_provider: Arc, + entry_ids_being_indexed: Arc, +} + +impl EmbeddingIndex { + pub fn new( + worktree: Model, + fs: Arc, + db_connection: heed::Env, + embedding_db: heed::Database>, + language_registry: Arc, + embedding_provider: Arc, + entry_ids_being_indexed: Arc, + ) -> Self { + Self { + worktree, + fs, + db_connection, + db: embedding_db, + language_registry, + embedding_provider, + entry_ids_being_indexed, + } + } + + pub fn db(&self) -> &heed::Database> { + &self.db + } + + pub fn index_entries_changed_on_disk( + &self, + cx: &AppContext, + ) -> impl Future> { + let worktree = self.worktree.read(cx).snapshot(); + let worktree_abs_path = worktree.abs_path().clone(); + let scan = self.scan_entries(worktree, cx); + let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx); + let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx); + let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx); + async move { + futures::try_join!(scan.task, chunk.task, embed.task, persist)?; + Ok(()) + } + } + + pub fn index_updated_entries( + &self, + updated_entries: UpdatedEntriesSet, + cx: &AppContext, + ) -> impl Future> { + let worktree = self.worktree.read(cx).snapshot(); + let worktree_abs_path = worktree.abs_path().clone(); + let scan = self.scan_updated_entries(worktree, updated_entries.clone(), cx); + let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx); + let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx); + let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx); + async move { + futures::try_join!(scan.task, chunk.task, embed.task, persist)?; + Ok(()) + } + } + + fn scan_entries(&self, worktree: Snapshot, cx: &AppContext) -> ScanEntries { + let (updated_entries_tx, updated_entries_rx) = channel::bounded(512); + let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) 
= channel::bounded(128); + let db_connection = self.db_connection.clone(); + let db = self.db; + let entries_being_indexed = self.entry_ids_being_indexed.clone(); + let task = cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create read transaction")?; + let mut db_entries = db + .iter(&txn) + .context("failed to create iterator")? + .move_between_keys() + .peekable(); + + let mut deletion_range: Option<(Bound<&str>, Bound<&str>)> = None; + for entry in worktree.files(false, 0) { + log::trace!("scanning for embedding index: {:?}", &entry.path); + + let entry_db_key = db_key_for_path(&entry.path); + + let mut saved_mtime = None; + while let Some(db_entry) = db_entries.peek() { + match db_entry { + Ok((db_path, db_embedded_file)) => match (*db_path).cmp(&entry_db_key) { + Ordering::Less => { + if let Some(deletion_range) = deletion_range.as_mut() { + deletion_range.1 = Bound::Included(db_path); + } else { + deletion_range = + Some((Bound::Included(db_path), Bound::Included(db_path))); + } + + db_entries.next(); + } + Ordering::Equal => { + if let Some(deletion_range) = deletion_range.take() { + deleted_entry_ranges_tx + .send(( + deletion_range.0.map(ToString::to_string), + deletion_range.1.map(ToString::to_string), + )) + .await?; + } + saved_mtime = db_embedded_file.mtime; + db_entries.next(); + break; + } + Ordering::Greater => { + break; + } + }, + Err(_) => return Err(db_entries.next().unwrap().unwrap_err())?, + } + } + + if entry.mtime != saved_mtime { + let handle = entries_being_indexed.insert(entry.id); + updated_entries_tx.send((entry.clone(), handle)).await?; + } + } + + if let Some(db_entry) = db_entries.next() { + let (db_path, _) = db_entry?; + deleted_entry_ranges_tx + .send((Bound::Included(db_path.to_string()), Bound::Unbounded)) + .await?; + } + + Ok(()) + }); + + ScanEntries { + updated_entries: updated_entries_rx, + deleted_entry_ranges: deleted_entry_ranges_rx, + task, + } + } + + fn 
scan_updated_entries( + &self, + worktree: Snapshot, + updated_entries: UpdatedEntriesSet, + cx: &AppContext, + ) -> ScanEntries { + let (updated_entries_tx, updated_entries_rx) = channel::bounded(512); + let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128); + let entries_being_indexed = self.entry_ids_being_indexed.clone(); + let task = cx.background_executor().spawn(async move { + for (path, entry_id, status) in updated_entries.iter() { + match status { + project::PathChange::Added + | project::PathChange::Updated + | project::PathChange::AddedOrUpdated => { + if let Some(entry) = worktree.entry_for_id(*entry_id) { + if entry.is_file() { + let handle = entries_being_indexed.insert(entry.id); + updated_entries_tx.send((entry.clone(), handle)).await?; + } + } + } + project::PathChange::Removed => { + let db_path = db_key_for_path(path); + deleted_entry_ranges_tx + .send((Bound::Included(db_path.clone()), Bound::Included(db_path))) + .await?; + } + project::PathChange::Loaded => { + // Do nothing. 
+ } + } + } + + Ok(()) + }); + + ScanEntries { + updated_entries: updated_entries_rx, + deleted_entry_ranges: deleted_entry_ranges_rx, + task, + } + } + + fn chunk_files( + &self, + worktree_abs_path: Arc, + entries: channel::Receiver<(Entry, IndexingEntryHandle)>, + cx: &AppContext, + ) -> ChunkFiles { + let language_registry = self.language_registry.clone(); + let fs = self.fs.clone(); + let (chunked_files_tx, chunked_files_rx) = channel::bounded(2048); + let task = cx.spawn(|cx| async move { + cx.background_executor() + .scoped(|cx| { + for _ in 0..cx.num_cpus() { + cx.spawn(async { + while let Ok((entry, handle)) = entries.recv().await { + let entry_abs_path = worktree_abs_path.join(&entry.path); + match fs.load(&entry_abs_path).await { + Ok(text) => { + let language = language_registry + .language_for_file_path(&entry.path) + .await + .ok(); + let chunked_file = ChunkedFile { + chunks: chunking::chunk_text( + &text, + language.as_ref(), + &entry.path, + ), + handle, + path: entry.path, + mtime: entry.mtime, + text, + }; + + if chunked_files_tx.send(chunked_file).await.is_err() { + return; + } + } + Err(_)=> { + log::error!("Failed to read contents into a UTF-8 string: {entry_abs_path:?}"); + } + } + } + }); + } + }) + .await; + Ok(()) + }); + + ChunkFiles { + files: chunked_files_rx, + task, + } + } + + pub fn embed_files( + embedding_provider: Arc, + chunked_files: channel::Receiver, + cx: &AppContext, + ) -> EmbedFiles { + let embedding_provider = embedding_provider.clone(); + let (embedded_files_tx, embedded_files_rx) = channel::bounded(512); + let task = cx.background_executor().spawn(async move { + let mut chunked_file_batches = + chunked_files.chunks_timeout(512, Duration::from_secs(2)); + while let Some(chunked_files) = chunked_file_batches.next().await { + // View the batch of files as a vec of chunks + // Flatten out to a vec of chunks that we can subdivide into batch sized pieces + // Once those are done, reassemble them back into the files in which 
they belong + // If any embeddings fail for a file, the entire file is discarded + + let chunks: Vec = chunked_files + .iter() + .flat_map(|file| { + file.chunks.iter().map(|chunk| TextToEmbed { + text: &file.text[chunk.range.clone()], + digest: chunk.digest, + }) + }) + .collect::>(); + + let mut embeddings: Vec> = Vec::new(); + for embedding_batch in chunks.chunks(embedding_provider.batch_size()) { + if let Some(batch_embeddings) = + embedding_provider.embed(embedding_batch).await.log_err() + { + if batch_embeddings.len() == embedding_batch.len() { + embeddings.extend(batch_embeddings.into_iter().map(Some)); + continue; + } + log::error!( + "embedding provider returned unexpected embedding count {}, expected {}", + batch_embeddings.len(), embedding_batch.len() + ); + } + + embeddings.extend(iter::repeat(None).take(embedding_batch.len())); + } + + let mut embeddings = embeddings.into_iter(); + for chunked_file in chunked_files { + let mut embedded_file = EmbeddedFile { + path: chunked_file.path, + mtime: chunked_file.mtime, + chunks: Vec::new(), + }; + + let mut embedded_all_chunks = true; + for (chunk, embedding) in + chunked_file.chunks.into_iter().zip(embeddings.by_ref()) + { + if let Some(embedding) = embedding { + embedded_file + .chunks + .push(EmbeddedChunk { chunk, embedding }); + } else { + embedded_all_chunks = false; + } + } + + if embedded_all_chunks { + embedded_files_tx + .send((embedded_file, chunked_file.handle)) + .await?; + } + } + } + Ok(()) + }); + + EmbedFiles { + files: embedded_files_rx, + task, + } + } + + fn persist_embeddings( + &self, + mut deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, + embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, + cx: &AppContext, + ) -> Task> { + let db_connection = self.db_connection.clone(); + let db = self.db; + cx.background_executor().spawn(async move { + while let Some(deletion_range) = deleted_entry_ranges.next().await { + let mut txn = db_connection.write_txn()?; + let 
start = deletion_range.0.as_ref().map(|start| start.as_str()); + let end = deletion_range.1.as_ref().map(|end| end.as_str()); + log::debug!("deleting embeddings in range {:?}", &(start, end)); + db.delete_range(&mut txn, &(start, end))?; + txn.commit()?; + } + + let mut embedded_files = embedded_files.chunks_timeout(4096, Duration::from_secs(2)); + while let Some(embedded_files) = embedded_files.next().await { + let mut txn = db_connection.write_txn()?; + for (file, _) in &embedded_files { + log::debug!("saving embedding for file {:?}", file.path); + let key = db_key_for_path(&file.path); + db.put(&mut txn, &key, file)?; + } + txn.commit()?; + + drop(embedded_files); + log::debug!("committed"); + } + + Ok(()) + }) + } + + pub fn paths(&self, cx: &AppContext) -> Task>>> { + let connection = self.db_connection.clone(); + let db = self.db; + cx.background_executor().spawn(async move { + let tx = connection + .read_txn() + .context("failed to create read transaction")?; + let result = db + .iter(&tx)? + .map(|entry| Ok(entry?.1.path.clone())) + .collect::>>>(); + drop(tx); + result + }) + } + + pub fn chunks_for_path( + &self, + path: Arc, + cx: &AppContext, + ) -> Task>> { + let connection = self.db_connection.clone(); + let db = self.db; + cx.background_executor().spawn(async move { + let tx = connection + .read_txn() + .context("failed to create read transaction")?; + Ok(db + .get(&tx, &db_key_for_path(&path))? + .ok_or_else(|| anyhow!("no such path"))? 
+ .chunks + .clone()) + }) + } +} + +struct ScanEntries { + updated_entries: channel::Receiver<(Entry, IndexingEntryHandle)>, + deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, + task: Task>, +} + +struct ChunkFiles { + files: channel::Receiver, + task: Task>, +} + +pub struct ChunkedFile { + pub path: Arc, + pub mtime: Option, + pub handle: IndexingEntryHandle, + pub text: String, + pub chunks: Vec, +} + +pub struct EmbedFiles { + pub files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, + pub task: Task>, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct EmbeddedFile { + pub path: Arc, + pub mtime: Option, + pub chunks: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct EmbeddedChunk { + pub chunk: Chunk, + pub embedding: Embedding, +} + +fn db_key_for_path(path: &Arc) -> String { + path.to_string_lossy().replace('/', "\0") +} diff --git a/crates/semantic_index/src/indexing.rs b/crates/semantic_index/src/indexing.rs new file mode 100644 index 00000000000000..aca9504891d0f7 --- /dev/null +++ b/crates/semantic_index/src/indexing.rs @@ -0,0 +1,49 @@ +use collections::HashSet; +use parking_lot::Mutex; +use project::ProjectEntryId; +use smol::channel; +use std::sync::{Arc, Weak}; + +/// The set of entries that are currently being indexed. +pub struct IndexingEntrySet { + entry_ids: Mutex>, + tx: channel::Sender<()>, +} + +/// When dropped, removes the entry from the set of entries that are being indexed. 
+#[derive(Clone)] +pub(crate) struct IndexingEntryHandle { + entry_id: ProjectEntryId, + set: Weak, +} + +impl IndexingEntrySet { + pub fn new(tx: channel::Sender<()>) -> Self { + Self { + entry_ids: Default::default(), + tx, + } + } + + pub fn insert(self: &Arc, entry_id: ProjectEntryId) -> IndexingEntryHandle { + self.entry_ids.lock().insert(entry_id); + self.tx.send_blocking(()).ok(); + IndexingEntryHandle { + entry_id, + set: Arc::downgrade(self), + } + } + + pub fn len(&self) -> usize { + self.entry_ids.lock().len() + } +} + +impl Drop for IndexingEntryHandle { + fn drop(&mut self) { + if let Some(set) = self.set.upgrade() { + set.tx.send_blocking(()).ok(); + set.entry_ids.lock().remove(&self.entry_id); + } + } +} diff --git a/crates/semantic_index/src/project_index.rs b/crates/semantic_index/src/project_index.rs new file mode 100644 index 00000000000000..84a72c1a3d8c61 --- /dev/null +++ b/crates/semantic_index/src/project_index.rs @@ -0,0 +1,523 @@ +use crate::{ + embedding::{EmbeddingProvider, TextToEmbed}, + summary_index::FileSummary, + worktree_index::{WorktreeIndex, WorktreeIndexHandle}, +}; +use anyhow::{anyhow, Context, Result}; +use collections::HashMap; +use fs::Fs; +use futures::{stream::StreamExt, FutureExt}; +use gpui::{ + AppContext, Entity, EntityId, EventEmitter, Model, ModelContext, Subscription, Task, WeakModel, +}; +use language::LanguageRegistry; +use log; +use project::{Project, Worktree, WorktreeId}; +use serde::{Deserialize, Serialize}; +use smol::channel; +use std::{cmp::Ordering, future::Future, num::NonZeroUsize, ops::Range, path::Path, sync::Arc}; +use util::ResultExt; + +#[derive(Debug)] +pub struct SearchResult { + pub worktree: Model, + pub path: Arc, + pub range: Range, + pub score: f32, +} + +pub struct WorktreeSearchResult { + pub worktree_id: WorktreeId, + pub path: Arc, + pub range: Range, + pub score: f32, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub enum Status { + Idle, + Loading, + 
Scanning { remaining_count: NonZeroUsize }, +} + +pub struct ProjectIndex { + db_connection: heed::Env, + project: WeakModel, + worktree_indices: HashMap, + language_registry: Arc, + fs: Arc, + last_status: Status, + status_tx: channel::Sender<()>, + embedding_provider: Arc, + _maintain_status: Task<()>, + _subscription: Subscription, +} + +impl ProjectIndex { + pub fn new( + project: Model, + db_connection: heed::Env, + embedding_provider: Arc, + cx: &mut ModelContext, + ) -> Self { + let language_registry = project.read(cx).languages().clone(); + let fs = project.read(cx).fs().clone(); + let (status_tx, mut status_rx) = channel::unbounded(); + let mut this = ProjectIndex { + db_connection, + project: project.downgrade(), + worktree_indices: HashMap::default(), + language_registry, + fs, + status_tx, + last_status: Status::Idle, + embedding_provider, + _subscription: cx.subscribe(&project, Self::handle_project_event), + _maintain_status: cx.spawn(|this, mut cx| async move { + while status_rx.next().await.is_some() { + if this + .update(&mut cx, |this, cx| this.update_status(cx)) + .is_err() + { + break; + } + } + }), + }; + this.update_worktree_indices(cx); + this + } + + pub fn status(&self) -> Status { + self.last_status + } + + pub fn project(&self) -> WeakModel { + self.project.clone() + } + + pub fn fs(&self) -> Arc { + self.fs.clone() + } + + fn handle_project_event( + &mut self, + _: Model, + event: &project::Event, + cx: &mut ModelContext, + ) { + match event { + project::Event::WorktreeAdded | project::Event::WorktreeRemoved(_) => { + self.update_worktree_indices(cx); + } + _ => {} + } + } + + fn update_worktree_indices(&mut self, cx: &mut ModelContext) { + let Some(project) = self.project.upgrade() else { + return; + }; + + let worktrees = project + .read(cx) + .visible_worktrees(cx) + .filter_map(|worktree| { + if worktree.read(cx).is_local() { + Some((worktree.entity_id(), worktree)) + } else { + None + } + }) + .collect::>(); + + self.worktree_indices 
+ .retain(|worktree_id, _| worktrees.contains_key(worktree_id)); + for (worktree_id, worktree) in worktrees { + self.worktree_indices.entry(worktree_id).or_insert_with(|| { + let worktree_index = WorktreeIndex::load( + worktree.clone(), + self.db_connection.clone(), + self.language_registry.clone(), + self.fs.clone(), + self.status_tx.clone(), + self.embedding_provider.clone(), + cx, + ); + + let load_worktree = cx.spawn(|this, mut cx| async move { + let result = match worktree_index.await { + Ok(worktree_index) => { + this.update(&mut cx, |this, _| { + this.worktree_indices.insert( + worktree_id, + WorktreeIndexHandle::Loaded { + index: worktree_index.clone(), + }, + ); + })?; + Ok(worktree_index) + } + Err(error) => { + this.update(&mut cx, |this, _cx| { + this.worktree_indices.remove(&worktree_id) + })?; + Err(Arc::new(error)) + } + }; + + this.update(&mut cx, |this, cx| this.update_status(cx))?; + + result + }); + + WorktreeIndexHandle::Loading { + index: load_worktree.shared(), + } + }); + } + + self.update_status(cx); + } + + fn update_status(&mut self, cx: &mut ModelContext) { + let mut indexing_count = 0; + let mut any_loading = false; + + for index in self.worktree_indices.values_mut() { + match index { + WorktreeIndexHandle::Loading { .. } => { + any_loading = true; + break; + } + WorktreeIndexHandle::Loaded { index, .. 
} => { + indexing_count += index.read(cx).entry_ids_being_indexed().len(); + } + } + } + + let status = if any_loading { + Status::Loading + } else if let Some(remaining_count) = NonZeroUsize::new(indexing_count) { + Status::Scanning { remaining_count } + } else { + Status::Idle + }; + + if status != self.last_status { + self.last_status = status; + cx.emit(status); + } + } + + pub fn search( + &self, + query: String, + limit: usize, + cx: &AppContext, + ) -> Task>> { + let (chunks_tx, chunks_rx) = channel::bounded(1024); + let mut worktree_scan_tasks = Vec::new(); + for worktree_index in self.worktree_indices.values() { + let worktree_index = worktree_index.clone(); + let chunks_tx = chunks_tx.clone(); + worktree_scan_tasks.push(cx.spawn(|cx| async move { + let index = match worktree_index { + WorktreeIndexHandle::Loading { index } => { + index.clone().await.map_err(|error| anyhow!(error))? + } + WorktreeIndexHandle::Loaded { index } => index.clone(), + }; + + index + .read_with(&cx, |index, cx| { + let worktree_id = index.worktree().read(cx).id(); + let db_connection = index.db_connection().clone(); + let db = *index.embedding_index().db(); + cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create read transaction")?; + let db_entries = db.iter(&txn).context("failed to iterate database")?; + for db_entry in db_entries { + let (_key, db_embedded_file) = db_entry?; + for chunk in db_embedded_file.chunks { + chunks_tx + .send((worktree_id, db_embedded_file.path.clone(), chunk)) + .await?; + } + } + anyhow::Ok(()) + }) + })? 
+ .await + })); + } + drop(chunks_tx); + + let project = self.project.clone(); + let embedding_provider = self.embedding_provider.clone(); + cx.spawn(|cx| async move { + #[cfg(debug_assertions)] + let embedding_query_start = std::time::Instant::now(); + log::info!("Searching for {query}"); + + let query_embeddings = embedding_provider + .embed(&[TextToEmbed::new(&query)]) + .await?; + let query_embedding = query_embeddings + .into_iter() + .next() + .ok_or_else(|| anyhow!("no embedding for query"))?; + + let mut results_by_worker = Vec::new(); + for _ in 0..cx.background_executor().num_cpus() { + results_by_worker.push(Vec::::new()); + } + + #[cfg(debug_assertions)] + let search_start = std::time::Instant::now(); + + cx.background_executor() + .scoped(|cx| { + for results in results_by_worker.iter_mut() { + cx.spawn(async { + while let Ok((worktree_id, path, chunk)) = chunks_rx.recv().await { + let score = chunk.embedding.similarity(&query_embedding); + let ix = match results.binary_search_by(|probe| { + score.partial_cmp(&probe.score).unwrap_or(Ordering::Equal) + }) { + Ok(ix) | Err(ix) => ix, + }; + results.insert( + ix, + WorktreeSearchResult { + worktree_id, + path: path.clone(), + range: chunk.chunk.range.clone(), + score, + }, + ); + results.truncate(limit); + } + }); + } + }) + .await; + + for scan_task in futures::future::join_all(worktree_scan_tasks).await { + scan_task.log_err(); + } + + project.read_with(&cx, |project, cx| { + let mut search_results = Vec::with_capacity(results_by_worker.len() * limit); + for worker_results in results_by_worker { + search_results.extend(worker_results.into_iter().filter_map(|result| { + Some(SearchResult { + worktree: project.worktree_for_id(result.worktree_id, cx)?, + path: result.path, + range: result.range, + score: result.score, + }) + })); + } + search_results.sort_unstable_by(|a, b| { + b.score.partial_cmp(&a.score).unwrap_or(Ordering::Equal) + }); + search_results.truncate(limit); + + #[cfg(debug_assertions)] + { 
+ let search_elapsed = search_start.elapsed(); + log::debug!( + "searched {} entries in {:?}", + search_results.len(), + search_elapsed + ); + let embedding_query_elapsed = embedding_query_start.elapsed(); + log::debug!("embedding query took {:?}", embedding_query_elapsed); + } + + search_results + }) + }) + } + + #[cfg(test)] + pub fn path_count(&self, cx: &AppContext) -> Result { + let mut result = 0; + for worktree_index in self.worktree_indices.values() { + if let WorktreeIndexHandle::Loaded { index, .. } = worktree_index { + result += index.read(cx).path_count()?; + } + } + Ok(result) + } + + pub(crate) fn worktree_index( + &self, + worktree_id: WorktreeId, + cx: &AppContext, + ) -> Option> { + for index in self.worktree_indices.values() { + if let WorktreeIndexHandle::Loaded { index, .. } = index { + if index.read(cx).worktree().read(cx).id() == worktree_id { + return Some(index.clone()); + } + } + } + None + } + + pub(crate) fn worktree_indices(&self, cx: &AppContext) -> Vec> { + let mut result = self + .worktree_indices + .values() + .filter_map(|index| { + if let WorktreeIndexHandle::Loaded { index, .. } = index { + Some(index.clone()) + } else { + None + } + }) + .collect::>(); + result.sort_by_key(|index| index.read(cx).worktree().read(cx).id()); + result + } + + pub fn all_summaries(&self, cx: &AppContext) -> Task>> { + let (summaries_tx, summaries_rx) = channel::bounded(1024); + let mut worktree_scan_tasks = Vec::new(); + for worktree_index in self.worktree_indices.values() { + let worktree_index = worktree_index.clone(); + let summaries_tx: channel::Sender<(String, String)> = summaries_tx.clone(); + worktree_scan_tasks.push(cx.spawn(|cx| async move { + let index = match worktree_index { + WorktreeIndexHandle::Loading { index } => { + index.clone().await.map_err(|error| anyhow!(error))? 
+ } + WorktreeIndexHandle::Loaded { index } => index.clone(), + }; + + index + .read_with(&cx, |index, cx| { + let db_connection = index.db_connection().clone(); + let summary_index = index.summary_index(); + let file_digest_db = summary_index.file_digest_db(); + let summary_db = summary_index.summary_db(); + + cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create db read transaction")?; + let db_entries = file_digest_db + .iter(&txn) + .context("failed to iterate database")?; + for db_entry in db_entries { + let (file_path, db_file) = db_entry?; + + match summary_db.get(&txn, &db_file.digest) { + Ok(opt_summary) => { + // Currently, we only use summaries we already have. If the file hasn't been + // summarized yet, then we skip it and don't include it in the inferred context. + // If we want to do just-in-time summarization, this would be the place to do it! + if let Some(summary) = opt_summary { + summaries_tx + .send((file_path.to_string(), summary.to_string())) + .await?; + } else { + log::warn!("No summary found for {:?}", &db_file); + } + } + Err(err) => { + log::error!( + "Error reading from summary database: {:?}", + err + ); + } + } + } + anyhow::Ok(()) + }) + })? 
+ .await + })); + } + drop(summaries_tx); + + let project = self.project.clone(); + cx.spawn(|cx| async move { + let mut results_by_worker = Vec::new(); + for _ in 0..cx.background_executor().num_cpus() { + results_by_worker.push(Vec::::new()); + } + + cx.background_executor() + .scoped(|cx| { + for results in results_by_worker.iter_mut() { + cx.spawn(async { + while let Ok((filename, summary)) = summaries_rx.recv().await { + results.push(FileSummary { filename, summary }); + } + }); + } + }) + .await; + + for scan_task in futures::future::join_all(worktree_scan_tasks).await { + scan_task.log_err(); + } + + project.read_with(&cx, |_project, _cx| { + results_by_worker.into_iter().flatten().collect() + }) + }) + } + + /// Empty out the backlogs of all the worktrees in the project + pub fn flush_summary_backlogs(&self, cx: &AppContext) -> impl Future { + let flush_start = std::time::Instant::now(); + + futures::future::join_all(self.worktree_indices.values().map(|worktree_index| { + let worktree_index = worktree_index.clone(); + + cx.spawn(|cx| async move { + let index = match worktree_index { + WorktreeIndexHandle::Loading { index } => { + index.clone().await.map_err(|error| anyhow!(error))? + } + WorktreeIndexHandle::Loaded { index } => index.clone(), + }; + let worktree_abs_path = + cx.update(|cx| index.read(cx).worktree().read(cx).abs_path())?; + + index + .read_with(&cx, |index, cx| { + cx.background_executor() + .spawn(index.summary_index().flush_backlog(worktree_abs_path, cx)) + })? + .await + }) + })) + .map(move |results| { + // Log any errors, but don't block the user. These summaries are supposed to + // improve quality by providing extra context, but they aren't hard requirements! 
+ for result in results { + if let Err(err) = result { + log::error!("Error flushing summary backlog: {:?}", err); + } + } + + log::info!("Summary backlog flushed in {:?}", flush_start.elapsed()); + }) + } + + pub fn remaining_summaries(&self, cx: &mut ModelContext) -> usize { + self.worktree_indices(cx) + .iter() + .map(|index| index.read(cx).summary_index().backlog_len()) + .sum() + } +} + +impl EventEmitter for ProjectIndex {} diff --git a/crates/semantic_index/src/project_index_debug_view.rs b/crates/semantic_index/src/project_index_debug_view.rs index e5881a24e71e61..d6628064ac0aee 100644 --- a/crates/semantic_index/src/project_index_debug_view.rs +++ b/crates/semantic_index/src/project_index_debug_view.rs @@ -55,8 +55,12 @@ impl ProjectIndexDebugView { for index in worktree_indices { let (root_path, worktree_id, worktree_paths) = index.read_with(&cx, |index, cx| { - let worktree = index.worktree.read(cx); - (worktree.abs_path(), worktree.id(), index.paths(cx)) + let worktree = index.worktree().read(cx); + ( + worktree.abs_path(), + worktree.id(), + index.embedding_index().paths(cx), + ) })?; rows.push(Row::Worktree(root_path)); rows.extend( @@ -82,10 +86,12 @@ impl ProjectIndexDebugView { cx: &mut ViewContext, ) -> Option<()> { let project_index = self.index.read(cx); - let fs = project_index.fs.clone(); + let fs = project_index.fs().clone(); let worktree_index = project_index.worktree_index(worktree_id, cx)?.read(cx); - let root_path = worktree_index.worktree.read(cx).abs_path(); - let chunks = worktree_index.chunks_for_path(file_path.clone(), cx); + let root_path = worktree_index.worktree().read(cx).abs_path(); + let chunks = worktree_index + .embedding_index() + .chunks_for_path(file_path.clone(), cx); cx.spawn(|this, mut cx| async move { let chunks = chunks.await?; diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index fad3a5d3e8554b..f2b325ead690b3 100644 --- 
a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -1,48 +1,35 @@ mod chunking; mod embedding; +mod embedding_index; +mod indexing; +mod project_index; mod project_index_debug_view; +mod summary_backlog; +mod summary_index; +mod worktree_index; + +use anyhow::{Context as _, Result}; +use collections::HashMap; +use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Context, Global, Model, WeakModel}; +use project::Project; +use project_index::ProjectIndex; +use std::{path::PathBuf, sync::Arc}; +use ui::ViewContext; +use workspace::Workspace; -use anyhow::{anyhow, Context as _, Result}; -use chunking::{chunk_text, Chunk}; -use collections::{Bound, HashMap, HashSet}; pub use embedding::*; -use fs::Fs; -use futures::{future::Shared, stream::StreamExt, FutureExt}; -use futures_batch::ChunksTimeoutStreamExt; -use gpui::{ - AppContext, AsyncAppContext, BorrowAppContext, Context, Entity, EntityId, EventEmitter, Global, - Model, ModelContext, Subscription, Task, WeakModel, -}; -use heed::types::{SerdeBincode, Str}; -use language::LanguageRegistry; -use parking_lot::Mutex; -use project::{Entry, Project, ProjectEntryId, UpdatedEntriesSet, Worktree, WorktreeId}; -use serde::{Deserialize, Serialize}; -use smol::channel; -use std::{ - cmp::Ordering, - future::Future, - iter, - num::NonZeroUsize, - ops::Range, - path::{Path, PathBuf}, - sync::{Arc, Weak}, - time::{Duration, SystemTime}, -}; -use util::ResultExt; -use worktree::Snapshot; - pub use project_index_debug_view::ProjectIndexDebugView; +pub use summary_index::FileSummary; -pub struct SemanticIndex { +pub struct SemanticDb { embedding_provider: Arc, db_connection: heed::Env, project_indices: HashMap, Model>, } -impl Global for SemanticIndex {} +impl Global for SemanticDb {} -impl SemanticIndex { +impl SemanticDb { pub async fn new( db_path: PathBuf, embedding_provider: Arc, @@ -62,7 +49,45 @@ impl SemanticIndex { .await .context("opening database connection")?; - 
Ok(SemanticIndex { + cx.update(|cx| { + cx.observe_new_views( + |workspace: &mut Workspace, cx: &mut ViewContext| { + let project = workspace.project().clone(); + + if cx.has_global::() { + cx.update_global::(|this, cx| { + let project_index = cx.new_model(|cx| { + ProjectIndex::new( + project.clone(), + this.db_connection.clone(), + this.embedding_provider.clone(), + cx, + ) + }); + + let project_weak = project.downgrade(); + this.project_indices + .insert(project_weak.clone(), project_index); + + cx.on_release(move |_, _, cx| { + if cx.has_global::() { + cx.update_global::(|this, _| { + this.project_indices.remove(&project_weak); + }) + } + }) + .detach(); + }) + } else { + log::info!("No SemanticDb, skipping project index") + } + }, + ) + .detach(); + }) + .ok(); + + Ok(SemanticDb { db_connection, embedding_provider, project_indices: HashMap::default(), @@ -72,985 +97,50 @@ impl SemanticIndex { pub fn project_index( &mut self, project: Model, - cx: &mut AppContext, - ) -> Model { - let project_weak = project.downgrade(); - project.update(cx, move |_, cx| { - cx.on_release(move |_, cx| { - if cx.has_global::() { - cx.update_global::(|this, _| { - this.project_indices.remove(&project_weak); - }) - } - }) - .detach(); - }); - - self.project_indices - .entry(project.downgrade()) - .or_insert_with(|| { - cx.new_model(|cx| { - ProjectIndex::new( - project, - self.db_connection.clone(), - self.embedding_provider.clone(), - cx, - ) - }) - }) - .clone() - } -} - -pub struct ProjectIndex { - db_connection: heed::Env, - project: WeakModel, - worktree_indices: HashMap, - language_registry: Arc, - fs: Arc, - last_status: Status, - status_tx: channel::Sender<()>, - embedding_provider: Arc, - _maintain_status: Task<()>, - _subscription: Subscription, -} - -#[derive(Clone)] -enum WorktreeIndexHandle { - Loading { - index: Shared, Arc>>>, - }, - Loaded { - index: Model, - }, -} - -impl ProjectIndex { - fn new( - project: Model, - db_connection: heed::Env, - embedding_provider: 
Arc, - cx: &mut ModelContext, - ) -> Self { - let language_registry = project.read(cx).languages().clone(); - let fs = project.read(cx).fs().clone(); - let (status_tx, mut status_rx) = channel::unbounded(); - let mut this = ProjectIndex { - db_connection, - project: project.downgrade(), - worktree_indices: HashMap::default(), - language_registry, - fs, - status_tx, - last_status: Status::Idle, - embedding_provider, - _subscription: cx.subscribe(&project, Self::handle_project_event), - _maintain_status: cx.spawn(|this, mut cx| async move { - while status_rx.next().await.is_some() { - if this - .update(&mut cx, |this, cx| this.update_status(cx)) - .is_err() - { - break; - } - } - }), - }; - this.update_worktree_indices(cx); - this - } - - pub fn status(&self) -> Status { - self.last_status - } - - pub fn project(&self) -> WeakModel { - self.project.clone() - } - - pub fn fs(&self) -> Arc { - self.fs.clone() - } - - fn handle_project_event( - &mut self, - _: Model, - event: &project::Event, - cx: &mut ModelContext, - ) { - match event { - project::Event::WorktreeAdded | project::Event::WorktreeRemoved(_) => { - self.update_worktree_indices(cx); - } - _ => {} - } - } - - fn update_worktree_indices(&mut self, cx: &mut ModelContext) { - let Some(project) = self.project.upgrade() else { - return; - }; - - let worktrees = project - .read(cx) - .visible_worktrees(cx) - .filter_map(|worktree| { - if worktree.read(cx).is_local() { - Some((worktree.entity_id(), worktree)) - } else { - None - } - }) - .collect::>(); - - self.worktree_indices - .retain(|worktree_id, _| worktrees.contains_key(worktree_id)); - for (worktree_id, worktree) in worktrees { - self.worktree_indices.entry(worktree_id).or_insert_with(|| { - let worktree_index = WorktreeIndex::load( - worktree.clone(), - self.db_connection.clone(), - self.language_registry.clone(), - self.fs.clone(), - self.status_tx.clone(), - self.embedding_provider.clone(), - cx, - ); - - let load_worktree = cx.spawn(|this, mut cx| 
async move { - let result = match worktree_index.await { - Ok(worktree_index) => { - this.update(&mut cx, |this, _| { - this.worktree_indices.insert( - worktree_id, - WorktreeIndexHandle::Loaded { - index: worktree_index.clone(), - }, - ); - })?; - Ok(worktree_index) - } - Err(error) => { - this.update(&mut cx, |this, _cx| { - this.worktree_indices.remove(&worktree_id) - })?; - Err(Arc::new(error)) - } - }; - - this.update(&mut cx, |this, cx| this.update_status(cx))?; - - result - }); - - WorktreeIndexHandle::Loading { - index: load_worktree.shared(), - } - }); - } - - self.update_status(cx); + _cx: &mut AppContext, + ) -> Option> { + self.project_indices.get(&project.downgrade()).cloned() } - fn update_status(&mut self, cx: &mut ModelContext) { - let mut indexing_count = 0; - let mut any_loading = false; - - for index in self.worktree_indices.values_mut() { - match index { - WorktreeIndexHandle::Loading { .. } => { - any_loading = true; - break; - } - WorktreeIndexHandle::Loaded { index, .. } => { - indexing_count += index.read(cx).entry_ids_being_indexed.len(); - } - } - } - - let status = if any_loading { - Status::Loading - } else if let Some(remaining_count) = NonZeroUsize::new(indexing_count) { - Status::Scanning { remaining_count } - } else { - Status::Idle - }; - - if status != self.last_status { - self.last_status = status; - cx.emit(status); - } - } - - pub fn search( - &self, - query: String, - limit: usize, - cx: &AppContext, - ) -> Task>> { - let (chunks_tx, chunks_rx) = channel::bounded(1024); - let mut worktree_scan_tasks = Vec::new(); - for worktree_index in self.worktree_indices.values() { - let worktree_index = worktree_index.clone(); - let chunks_tx = chunks_tx.clone(); - worktree_scan_tasks.push(cx.spawn(|cx| async move { - let index = match worktree_index { - WorktreeIndexHandle::Loading { index } => { - index.clone().await.map_err(|error| anyhow!(error))? 
- } - WorktreeIndexHandle::Loaded { index } => index.clone(), - }; - - index - .read_with(&cx, |index, cx| { - let worktree_id = index.worktree.read(cx).id(); - let db_connection = index.db_connection.clone(); - let db = index.db; - cx.background_executor().spawn(async move { - let txn = db_connection - .read_txn() - .context("failed to create read transaction")?; - let db_entries = db.iter(&txn).context("failed to iterate database")?; - for db_entry in db_entries { - let (_key, db_embedded_file) = db_entry?; - for chunk in db_embedded_file.chunks { - chunks_tx - .send((worktree_id, db_embedded_file.path.clone(), chunk)) - .await?; - } - } - anyhow::Ok(()) - }) - })? - .await - })); - } - drop(chunks_tx); - - let project = self.project.clone(); - let embedding_provider = self.embedding_provider.clone(); - cx.spawn(|cx| async move { - #[cfg(debug_assertions)] - let embedding_query_start = std::time::Instant::now(); - log::info!("Searching for {query}"); - - let query_embeddings = embedding_provider - .embed(&[TextToEmbed::new(&query)]) - .await?; - let query_embedding = query_embeddings - .into_iter() - .next() - .ok_or_else(|| anyhow!("no embedding for query"))?; - - let mut results_by_worker = Vec::new(); - for _ in 0..cx.background_executor().num_cpus() { - results_by_worker.push(Vec::::new()); - } - - #[cfg(debug_assertions)] - let search_start = std::time::Instant::now(); - - cx.background_executor() - .scoped(|cx| { - for results in results_by_worker.iter_mut() { - cx.spawn(async { - while let Ok((worktree_id, path, chunk)) = chunks_rx.recv().await { - let score = chunk.embedding.similarity(&query_embedding); - let ix = match results.binary_search_by(|probe| { - score.partial_cmp(&probe.score).unwrap_or(Ordering::Equal) - }) { - Ok(ix) | Err(ix) => ix, - }; - results.insert( - ix, - WorktreeSearchResult { - worktree_id, - path: path.clone(), - range: chunk.chunk.range.clone(), - score, - }, - ); - results.truncate(limit); - } - }); - } - }) - .await; - - for 
scan_task in futures::future::join_all(worktree_scan_tasks).await { - scan_task.log_err(); - } - - project.read_with(&cx, |project, cx| { - let mut search_results = Vec::with_capacity(results_by_worker.len() * limit); - for worker_results in results_by_worker { - search_results.extend(worker_results.into_iter().filter_map(|result| { - Some(SearchResult { - worktree: project.worktree_for_id(result.worktree_id, cx)?, - path: result.path, - range: result.range, - score: result.score, - }) - })); - } - search_results.sort_unstable_by(|a, b| { - b.score.partial_cmp(&a.score).unwrap_or(Ordering::Equal) - }); - search_results.truncate(limit); - - #[cfg(debug_assertions)] - { - let search_elapsed = search_start.elapsed(); - log::debug!( - "searched {} entries in {:?}", - search_results.len(), - search_elapsed - ); - let embedding_query_elapsed = embedding_query_start.elapsed(); - log::debug!("embedding query took {:?}", embedding_query_elapsed); - } - - search_results - }) - }) - } - - #[cfg(test)] - pub fn path_count(&self, cx: &AppContext) -> Result { - let mut result = 0; - for worktree_index in self.worktree_indices.values() { - if let WorktreeIndexHandle::Loaded { index, .. } = worktree_index { - result += index.read(cx).path_count()?; - } - } - Ok(result) - } - - pub(crate) fn worktree_index( + pub fn remaining_summaries( &self, - worktree_id: WorktreeId, - cx: &AppContext, - ) -> Option> { - for index in self.worktree_indices.values() { - if let WorktreeIndexHandle::Loaded { index, .. } = index { - if index.read(cx).worktree.read(cx).id() == worktree_id { - return Some(index.clone()); - } - } - } - None - } - - pub(crate) fn worktree_indices(&self, cx: &AppContext) -> Vec> { - let mut result = self - .worktree_indices - .values() - .filter_map(|index| { - if let WorktreeIndexHandle::Loaded { index, .. 
} = index { - Some(index.clone()) - } else { - None - } - }) - .collect::>(); - result.sort_by_key(|index| index.read(cx).worktree.read(cx).id()); - result - } -} - -pub struct SearchResult { - pub worktree: Model, - pub path: Arc, - pub range: Range, - pub score: f32, -} - -pub struct WorktreeSearchResult { - pub worktree_id: WorktreeId, - pub path: Arc, - pub range: Range, - pub score: f32, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -pub enum Status { - Idle, - Loading, - Scanning { remaining_count: NonZeroUsize }, -} - -impl EventEmitter for ProjectIndex {} - -struct WorktreeIndex { - worktree: Model, - db_connection: heed::Env, - db: heed::Database>, - language_registry: Arc, - fs: Arc, - embedding_provider: Arc, - entry_ids_being_indexed: Arc, - _index_entries: Task>, - _subscription: Subscription, -} - -impl WorktreeIndex { - pub fn load( - worktree: Model, - db_connection: heed::Env, - language_registry: Arc, - fs: Arc, - status_tx: channel::Sender<()>, - embedding_provider: Arc, + project: &WeakModel, cx: &mut AppContext, - ) -> Task>> { - let worktree_abs_path = worktree.read(cx).abs_path(); - cx.spawn(|mut cx| async move { - let db = cx - .background_executor() - .spawn({ - let db_connection = db_connection.clone(); - async move { - let mut txn = db_connection.write_txn()?; - let db_name = worktree_abs_path.to_string_lossy(); - let db = db_connection.create_database(&mut txn, Some(&db_name))?; - txn.commit()?; - anyhow::Ok(db) - } - }) - .await?; - cx.new_model(|cx| { - Self::new( - worktree, - db_connection, - db, - status_tx, - language_registry, - fs, - embedding_provider, - cx, - ) + ) -> Option { + self.project_indices.get(project).map(|project_index| { + project_index.update(cx, |project_index, cx| { + project_index.remaining_summaries(cx) }) }) } - - #[allow(clippy::too_many_arguments)] - fn new( - worktree: Model, - db_connection: heed::Env, - db: heed::Database>, - status: channel::Sender<()>, - language_registry: 
Arc, - fs: Arc, - embedding_provider: Arc, - cx: &mut ModelContext, - ) -> Self { - let (updated_entries_tx, updated_entries_rx) = channel::unbounded(); - let _subscription = cx.subscribe(&worktree, move |_this, _worktree, event, _cx| { - if let worktree::Event::UpdatedEntries(update) = event { - _ = updated_entries_tx.try_send(update.clone()); - } - }); - - Self { - db_connection, - db, - worktree, - language_registry, - fs, - embedding_provider, - entry_ids_being_indexed: Arc::new(IndexingEntrySet::new(status)), - _index_entries: cx.spawn(|this, cx| Self::index_entries(this, updated_entries_rx, cx)), - _subscription, - } - } - - async fn index_entries( - this: WeakModel, - updated_entries: channel::Receiver, - mut cx: AsyncAppContext, - ) -> Result<()> { - let index = this.update(&mut cx, |this, cx| this.index_entries_changed_on_disk(cx))?; - index.await.log_err(); - - while let Ok(updated_entries) = updated_entries.recv().await { - let index = this.update(&mut cx, |this, cx| { - this.index_updated_entries(updated_entries, cx) - })?; - index.await.log_err(); - } - - Ok(()) - } - - fn index_entries_changed_on_disk(&self, cx: &AppContext) -> impl Future> { - let worktree = self.worktree.read(cx).snapshot(); - let worktree_abs_path = worktree.abs_path().clone(); - let scan = self.scan_entries(worktree, cx); - let chunk = self.chunk_files(worktree_abs_path, scan.updated_entries, cx); - let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx); - let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx); - async move { - futures::try_join!(scan.task, chunk.task, embed.task, persist)?; - Ok(()) - } - } - - fn index_updated_entries( - &self, - updated_entries: UpdatedEntriesSet, - cx: &AppContext, - ) -> impl Future> { - let worktree = self.worktree.read(cx).snapshot(); - let worktree_abs_path = worktree.abs_path().clone(); - let scan = self.scan_updated_entries(worktree, updated_entries.clone(), cx); - let chunk = 
self.chunk_files(worktree_abs_path, scan.updated_entries, cx); - let embed = Self::embed_files(self.embedding_provider.clone(), chunk.files, cx); - let persist = self.persist_embeddings(scan.deleted_entry_ranges, embed.files, cx); - async move { - futures::try_join!(scan.task, chunk.task, embed.task, persist)?; - Ok(()) - } - } - - fn scan_entries(&self, worktree: Snapshot, cx: &AppContext) -> ScanEntries { - let (updated_entries_tx, updated_entries_rx) = channel::bounded(512); - let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128); - let db_connection = self.db_connection.clone(); - let db = self.db; - let entries_being_indexed = self.entry_ids_being_indexed.clone(); - let task = cx.background_executor().spawn(async move { - let txn = db_connection - .read_txn() - .context("failed to create read transaction")?; - let mut db_entries = db - .iter(&txn) - .context("failed to create iterator")? - .move_between_keys() - .peekable(); - - let mut deletion_range: Option<(Bound<&str>, Bound<&str>)> = None; - for entry in worktree.files(false, 0) { - let entry_db_key = db_key_for_path(&entry.path); - - let mut saved_mtime = None; - while let Some(db_entry) = db_entries.peek() { - match db_entry { - Ok((db_path, db_embedded_file)) => match (*db_path).cmp(&entry_db_key) { - Ordering::Less => { - if let Some(deletion_range) = deletion_range.as_mut() { - deletion_range.1 = Bound::Included(db_path); - } else { - deletion_range = - Some((Bound::Included(db_path), Bound::Included(db_path))); - } - - db_entries.next(); - } - Ordering::Equal => { - if let Some(deletion_range) = deletion_range.take() { - deleted_entry_ranges_tx - .send(( - deletion_range.0.map(ToString::to_string), - deletion_range.1.map(ToString::to_string), - )) - .await?; - } - saved_mtime = db_embedded_file.mtime; - db_entries.next(); - break; - } - Ordering::Greater => { - break; - } - }, - Err(_) => return Err(db_entries.next().unwrap().unwrap_err())?, - } - } - - if entry.mtime != 
saved_mtime { - let handle = entries_being_indexed.insert(entry.id); - updated_entries_tx.send((entry.clone(), handle)).await?; - } - } - - if let Some(db_entry) = db_entries.next() { - let (db_path, _) = db_entry?; - deleted_entry_ranges_tx - .send((Bound::Included(db_path.to_string()), Bound::Unbounded)) - .await?; - } - - Ok(()) - }); - - ScanEntries { - updated_entries: updated_entries_rx, - deleted_entry_ranges: deleted_entry_ranges_rx, - task, - } - } - - fn scan_updated_entries( - &self, - worktree: Snapshot, - updated_entries: UpdatedEntriesSet, - cx: &AppContext, - ) -> ScanEntries { - let (updated_entries_tx, updated_entries_rx) = channel::bounded(512); - let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128); - let entries_being_indexed = self.entry_ids_being_indexed.clone(); - let task = cx.background_executor().spawn(async move { - for (path, entry_id, status) in updated_entries.iter() { - match status { - project::PathChange::Added - | project::PathChange::Updated - | project::PathChange::AddedOrUpdated => { - if let Some(entry) = worktree.entry_for_id(*entry_id) { - if entry.is_file() { - let handle = entries_being_indexed.insert(entry.id); - updated_entries_tx.send((entry.clone(), handle)).await?; - } - } - } - project::PathChange::Removed => { - let db_path = db_key_for_path(path); - deleted_entry_ranges_tx - .send((Bound::Included(db_path.clone()), Bound::Included(db_path))) - .await?; - } - project::PathChange::Loaded => { - // Do nothing. 
- } - } - } - - Ok(()) - }); - - ScanEntries { - updated_entries: updated_entries_rx, - deleted_entry_ranges: deleted_entry_ranges_rx, - task, - } - } - - fn chunk_files( - &self, - worktree_abs_path: Arc, - entries: channel::Receiver<(Entry, IndexingEntryHandle)>, - cx: &AppContext, - ) -> ChunkFiles { - let language_registry = self.language_registry.clone(); - let fs = self.fs.clone(); - let (chunked_files_tx, chunked_files_rx) = channel::bounded(2048); - let task = cx.spawn(|cx| async move { - cx.background_executor() - .scoped(|cx| { - for _ in 0..cx.num_cpus() { - cx.spawn(async { - while let Ok((entry, handle)) = entries.recv().await { - let entry_abs_path = worktree_abs_path.join(&entry.path); - let Some(text) = fs - .load(&entry_abs_path) - .await - .with_context(|| { - format!("failed to read path {entry_abs_path:?}") - }) - .log_err() - else { - continue; - }; - let language = language_registry - .language_for_file_path(&entry.path) - .await - .ok(); - let chunked_file = ChunkedFile { - chunks: chunk_text(&text, language.as_ref(), &entry.path), - handle, - path: entry.path, - mtime: entry.mtime, - text, - }; - - if chunked_files_tx.send(chunked_file).await.is_err() { - return; - } - } - }); - } - }) - .await; - Ok(()) - }); - - ChunkFiles { - files: chunked_files_rx, - task, - } - } - - fn embed_files( - embedding_provider: Arc, - chunked_files: channel::Receiver, - cx: &AppContext, - ) -> EmbedFiles { - let embedding_provider = embedding_provider.clone(); - let (embedded_files_tx, embedded_files_rx) = channel::bounded(512); - let task = cx.background_executor().spawn(async move { - let mut chunked_file_batches = - chunked_files.chunks_timeout(512, Duration::from_secs(2)); - while let Some(chunked_files) = chunked_file_batches.next().await { - // View the batch of files as a vec of chunks - // Flatten out to a vec of chunks that we can subdivide into batch sized pieces - // Once those are done, reassemble them back into the files in which they belong - // 
If any embeddings fail for a file, the entire file is discarded - - let chunks: Vec = chunked_files - .iter() - .flat_map(|file| { - file.chunks.iter().map(|chunk| TextToEmbed { - text: &file.text[chunk.range.clone()], - digest: chunk.digest, - }) - }) - .collect::>(); - - let mut embeddings: Vec> = Vec::new(); - for embedding_batch in chunks.chunks(embedding_provider.batch_size()) { - if let Some(batch_embeddings) = - embedding_provider.embed(embedding_batch).await.log_err() - { - if batch_embeddings.len() == embedding_batch.len() { - embeddings.extend(batch_embeddings.into_iter().map(Some)); - continue; - } - log::error!( - "embedding provider returned unexpected embedding count {}, expected {}", - batch_embeddings.len(), embedding_batch.len() - ); - } - - embeddings.extend(iter::repeat(None).take(embedding_batch.len())); - } - - let mut embeddings = embeddings.into_iter(); - for chunked_file in chunked_files { - let mut embedded_file = EmbeddedFile { - path: chunked_file.path, - mtime: chunked_file.mtime, - chunks: Vec::new(), - }; - - let mut embedded_all_chunks = true; - for (chunk, embedding) in - chunked_file.chunks.into_iter().zip(embeddings.by_ref()) - { - if let Some(embedding) = embedding { - embedded_file - .chunks - .push(EmbeddedChunk { chunk, embedding }); - } else { - embedded_all_chunks = false; - } - } - - if embedded_all_chunks { - embedded_files_tx - .send((embedded_file, chunked_file.handle)) - .await?; - } - } - } - Ok(()) - }); - - EmbedFiles { - files: embedded_files_rx, - task, - } - } - - fn persist_embeddings( - &self, - mut deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, - embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, - cx: &AppContext, - ) -> Task> { - let db_connection = self.db_connection.clone(); - let db = self.db; - cx.background_executor().spawn(async move { - while let Some(deletion_range) = deleted_entry_ranges.next().await { - let mut txn = db_connection.write_txn()?; - let start = 
deletion_range.0.as_ref().map(|start| start.as_str()); - let end = deletion_range.1.as_ref().map(|end| end.as_str()); - log::debug!("deleting embeddings in range {:?}", &(start, end)); - db.delete_range(&mut txn, &(start, end))?; - txn.commit()?; - } - - let mut embedded_files = embedded_files.chunks_timeout(4096, Duration::from_secs(2)); - while let Some(embedded_files) = embedded_files.next().await { - let mut txn = db_connection.write_txn()?; - for (file, _) in &embedded_files { - log::debug!("saving embedding for file {:?}", file.path); - let key = db_key_for_path(&file.path); - db.put(&mut txn, &key, file)?; - } - txn.commit()?; - - drop(embedded_files); - log::debug!("committed"); - } - - Ok(()) - }) - } - - fn paths(&self, cx: &AppContext) -> Task>>> { - let connection = self.db_connection.clone(); - let db = self.db; - cx.background_executor().spawn(async move { - let tx = connection - .read_txn() - .context("failed to create read transaction")?; - let result = db - .iter(&tx)? - .map(|entry| Ok(entry?.1.path.clone())) - .collect::>>>(); - drop(tx); - result - }) - } - - fn chunks_for_path( - &self, - path: Arc, - cx: &AppContext, - ) -> Task>> { - let connection = self.db_connection.clone(); - let db = self.db; - cx.background_executor().spawn(async move { - let tx = connection - .read_txn() - .context("failed to create read transaction")?; - Ok(db - .get(&tx, &db_key_for_path(&path))? - .ok_or_else(|| anyhow!("no such path"))? - .chunks - .clone()) - }) - } - - #[cfg(test)] - fn path_count(&self) -> Result { - let txn = self - .db_connection - .read_txn() - .context("failed to create read transaction")?; - Ok(self.db.len(&txn)?) 
- } -} - -struct ScanEntries { - updated_entries: channel::Receiver<(Entry, IndexingEntryHandle)>, - deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, - task: Task>, -} - -struct ChunkFiles { - files: channel::Receiver, - task: Task>, -} - -struct ChunkedFile { - pub path: Arc, - pub mtime: Option, - pub handle: IndexingEntryHandle, - pub text: String, - pub chunks: Vec, -} - -struct EmbedFiles { - files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, - task: Task>, -} - -#[derive(Debug, Serialize, Deserialize)] -struct EmbeddedFile { - path: Arc, - mtime: Option, - chunks: Vec, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -struct EmbeddedChunk { - chunk: Chunk, - embedding: Embedding, -} - -/// The set of entries that are currently being indexed. -struct IndexingEntrySet { - entry_ids: Mutex>, - tx: channel::Sender<()>, -} - -/// When dropped, removes the entry from the set of entries that are being indexed. -#[derive(Clone)] -struct IndexingEntryHandle { - entry_id: ProjectEntryId, - set: Weak, -} - -impl IndexingEntrySet { - fn new(tx: channel::Sender<()>) -> Self { - Self { - entry_ids: Default::default(), - tx, - } - } - - fn insert(self: &Arc, entry_id: ProjectEntryId) -> IndexingEntryHandle { - self.entry_ids.lock().insert(entry_id); - self.tx.send_blocking(()).ok(); - IndexingEntryHandle { - entry_id, - set: Arc::downgrade(self), - } - } - - pub fn len(&self) -> usize { - self.entry_ids.lock().len() - } -} - -impl Drop for IndexingEntryHandle { - fn drop(&mut self) { - if let Some(set) = self.set.upgrade() { - set.tx.send_blocking(()).ok(); - set.entry_ids.lock().remove(&self.entry_id); - } - } -} - -fn db_key_for_path(path: &Arc) -> String { - path.to_string_lossy().replace('/', "\0") } #[cfg(test)] mod tests { use super::*; + use anyhow::anyhow; + use chunking::Chunk; + use embedding_index::{ChunkedFile, EmbeddingIndex}; + use feature_flags::FeatureFlagAppExt; + use fs::FakeFs; use futures::{future::BoxFuture, FutureExt}; use 
gpui::TestAppContext; + use indexing::IndexingEntrySet; use language::language_settings::AllLanguageSettings; - use project::Project; + use project::{Project, ProjectEntryId}; + use serde_json::json; use settings::SettingsStore; + use smol::{channel, stream::StreamExt}; use std::{future, path::Path, sync::Arc}; fn init_test(cx: &mut TestAppContext) { + env_logger::try_init().ok(); + cx.update(|cx| { let store = SettingsStore::test(cx); cx.set_global(store); language::init(cx); + cx.update_flags(false, vec![]); Project::init_settings(cx); SettingsStore::update(cx, |store, cx| { store.update_user_settings::(cx, |_| {}); @@ -1100,7 +190,7 @@ mod tests { let temp_dir = tempfile::tempdir().unwrap(); - let mut semantic_index = SemanticIndex::new( + let mut semantic_index = SemanticDb::new( temp_dir.path().into(), Arc::new(TestEmbeddingProvider::new(16, |text| { let mut embedding = vec![0f32; 2]; @@ -1124,26 +214,57 @@ mod tests { .await .unwrap(); - let project_path = Path::new("./fixture"); + let fs = FakeFs::new(cx.executor()); + let project_path = Path::new("/fake_project"); - let project = cx - .spawn(|mut cx| async move { Project::example([project_path], &mut cx).await }) - .await; + fs.insert_tree( + project_path, + json!({ + "fixture": { + "main.rs": include_str!("../fixture/main.rs"), + "needle.md": include_str!("../fixture/needle.md"), + } + }), + ) + .await; + + let project = Project::test(fs, [project_path], cx).await; cx.update(|cx| { let language_registry = project.read(cx).languages().clone(); let node_runtime = project.read(cx).node_runtime().unwrap().clone(); languages::init(language_registry, node_runtime, cx); + + // Manually create and insert the ProjectIndex + let project_index = cx.new_model(|cx| { + ProjectIndex::new( + project.clone(), + semantic_index.db_connection.clone(), + semantic_index.embedding_provider.clone(), + cx, + ) + }); + semantic_index + .project_indices + .insert(project.downgrade(), project_index); }); - let project_index = 
cx.update(|cx| semantic_index.project_index(project.clone(), cx)); + let project_index = cx + .update(|_cx| { + semantic_index + .project_indices + .get(&project.downgrade()) + .cloned() + }) + .unwrap(); - while project_index - .read_with(cx, |index, cx| index.path_count(cx)) + cx.run_until_parked(); + while cx + .update(|cx| semantic_index.remaining_summaries(&project.downgrade(), cx)) .unwrap() - == 0 + > 0 { - project_index.next_event(cx).await; + cx.run_until_parked(); } let results = cx @@ -1155,7 +276,11 @@ mod tests { .await .unwrap(); - assert!(results.len() > 1, "should have found some results"); + assert!( + results.len() > 1, + "should have found some results, but only found {:?}", + results + ); for result in &results { println!("result: {:?}", result.path); @@ -1165,7 +290,7 @@ mod tests { // Find result that is greater than 0.5 let search_result = results.iter().find(|result| result.score > 0.9).unwrap(); - assert_eq!(search_result.path.to_string_lossy(), "needle.md"); + assert_eq!(search_result.path.to_string_lossy(), "fixture/needle.md"); let content = cx .update(|cx| { @@ -1236,7 +361,7 @@ mod tests { chunked_files_tx.close(); let embed_files_task = - cx.update(|cx| WorktreeIndex::embed_files(provider.clone(), chunked_files_rx, cx)); + cx.update(|cx| EmbeddingIndex::embed_files(provider.clone(), chunked_files_rx, cx)); embed_files_task.task.await.unwrap(); let mut embedded_files_rx = embed_files_task.files; diff --git a/crates/semantic_index/src/summary_backlog.rs b/crates/semantic_index/src/summary_backlog.rs new file mode 100644 index 00000000000000..c6d8e33a45df83 --- /dev/null +++ b/crates/semantic_index/src/summary_backlog.rs @@ -0,0 +1,48 @@ +use collections::HashMap; +use std::{path::Path, sync::Arc, time::SystemTime}; + +const MAX_FILES_BEFORE_RESUMMARIZE: usize = 4; +const MAX_BYTES_BEFORE_RESUMMARIZE: u64 = 1_000_000; // 1 MB + +#[derive(Default, Debug)] +pub struct SummaryBacklog { + /// Key: path to a file that needs summarization, but 
that we haven't summarized yet. Value: that file's size on disk, in bytes, and its mtime. + files: HashMap, (u64, Option)>, + /// Cache of the sum of all values in `files`, so we don't have to traverse the whole map to check if we're over the byte limit. + total_bytes: u64, +} + +impl SummaryBacklog { + /// Store the given path in the backlog, along with how many bytes are in it. + pub fn insert(&mut self, path: Arc, bytes_on_disk: u64, mtime: Option) { + let (prev_bytes, _) = self + .files + .insert(path, (bytes_on_disk, mtime)) + .unwrap_or_default(); // Default to 0 prev_bytes + + // Update the cached total by subtracting out the old amount and adding the new one. + self.total_bytes = self.total_bytes - prev_bytes + bytes_on_disk; + } + + /// Returns true if the total number of bytes in the backlog exceeds a predefined threshold. + pub fn needs_drain(&self) -> bool { + self.files.len() > MAX_FILES_BEFORE_RESUMMARIZE || + // The whole purpose of the cached total_bytes is to make this comparison cheap. + // Otherwise we'd have to traverse the entire dictionary every time we wanted this answer. + self.total_bytes > MAX_BYTES_BEFORE_RESUMMARIZE + } + + /// Remove all the entries in the backlog and return the file paths as an iterator. 
+ #[allow(clippy::needless_lifetimes)] // Clippy thinks this 'a can be elided, but eliding it gives a compile error + pub fn drain<'a>(&'a mut self) -> impl Iterator, Option)> + 'a { + self.total_bytes = 0; + + self.files + .drain() + .map(|(path, (_size, mtime))| (path, mtime)) + } + + pub fn len(&self) -> usize { + self.files.len() + } +} diff --git a/crates/semantic_index/src/summary_index.rs b/crates/semantic_index/src/summary_index.rs new file mode 100644 index 00000000000000..08f25ae0287fa2 --- /dev/null +++ b/crates/semantic_index/src/summary_index.rs @@ -0,0 +1,693 @@ +use anyhow::{anyhow, Context as _, Result}; +use arrayvec::ArrayString; +use fs::Fs; +use futures::{stream::StreamExt, TryFutureExt}; +use futures_batch::ChunksTimeoutStreamExt; +use gpui::{AppContext, Model, Task}; +use heed::{ + types::{SerdeBincode, Str}, + RoTxn, +}; +use language_model::{ + LanguageModelCompletionEvent, LanguageModelId, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, Role, +}; +use log; +use parking_lot::Mutex; +use project::{Entry, UpdatedEntriesSet, Worktree}; +use serde::{Deserialize, Serialize}; +use smol::channel; +use std::{ + future::Future, + path::Path, + sync::Arc, + time::{Duration, Instant, SystemTime}, +}; +use util::ResultExt; +use worktree::Snapshot; + +use crate::{indexing::IndexingEntrySet, summary_backlog::SummaryBacklog}; + +#[derive(Serialize, Deserialize, Debug)] +pub struct FileSummary { + pub filename: String, + pub summary: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct UnsummarizedFile { + // Path to the file on disk + path: Arc, + // The mtime of the file on disk + mtime: Option, + // BLAKE3 hash of the source file's contents + digest: Blake3Digest, + // The source file's contents + contents: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct SummarizedFile { + // Path to the file on disk + path: String, + // The mtime of the file on disk + mtime: Option, + // BLAKE3 hash of the source 
file's contents + digest: Blake3Digest, + // The LLM's summary of the file's contents + summary: String, +} + +/// This is what blake3's to_hex() method returns - see https://docs.rs/blake3/1.5.3/src/blake3/lib.rs.html#246 +pub type Blake3Digest = ArrayString<{ blake3::OUT_LEN * 2 }>; + +#[derive(Debug, Serialize, Deserialize)] +pub struct FileDigest { + pub mtime: Option, + pub digest: Blake3Digest, +} + +struct NeedsSummary { + files: channel::Receiver, + task: Task>, +} + +struct SummarizeFiles { + files: channel::Receiver, + task: Task>, +} + +pub struct SummaryIndex { + worktree: Model, + fs: Arc, + db_connection: heed::Env, + file_digest_db: heed::Database>, // Key: file path. Val: BLAKE3 digest of its contents. + summary_db: heed::Database, Str>, // Key: BLAKE3 digest of a file's contents. Val: LLM summary of those contents. + backlog: Arc>, + _entry_ids_being_indexed: Arc, // TODO can this be removed? +} + +struct Backlogged { + paths_to_digest: channel::Receiver, Option)>>, + task: Task>, +} + +struct MightNeedSummaryFiles { + files: channel::Receiver, + task: Task>, +} + +impl SummaryIndex { + pub fn new( + worktree: Model, + fs: Arc, + db_connection: heed::Env, + file_digest_db: heed::Database>, + summary_db: heed::Database, Str>, + _entry_ids_being_indexed: Arc, + ) -> Self { + Self { + worktree, + fs, + db_connection, + file_digest_db, + summary_db, + _entry_ids_being_indexed, + backlog: Default::default(), + } + } + + pub fn file_digest_db(&self) -> heed::Database> { + self.file_digest_db + } + + pub fn summary_db(&self) -> heed::Database, Str> { + self.summary_db + } + + pub fn index_entries_changed_on_disk( + &self, + is_auto_available: bool, + cx: &AppContext, + ) -> impl Future> { + let start = Instant::now(); + let backlogged; + let digest; + let needs_summary; + let summaries; + let persist; + + if is_auto_available { + let worktree = self.worktree.read(cx).snapshot(); + let worktree_abs_path = worktree.abs_path().clone(); + + backlogged = 
self.scan_entries(worktree, cx); + digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx); + needs_summary = self.check_summary_cache(digest.files, cx); + summaries = self.summarize_files(needs_summary.files, cx); + persist = self.persist_summaries(summaries.files, cx); + } else { + // This feature is only staff-shipped, so make the rest of these no-ops. + backlogged = Backlogged { + paths_to_digest: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + digest = MightNeedSummaryFiles { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + needs_summary = NeedsSummary { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + summaries = SummarizeFiles { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + persist = Task::ready(Ok(())); + } + + async move { + futures::try_join!( + backlogged.task, + digest.task, + needs_summary.task, + summaries.task, + persist + )?; + + if is_auto_available { + log::info!( + "Summarizing everything that changed on disk took {:?}", + start.elapsed() + ); + } + + Ok(()) + } + } + + pub fn index_updated_entries( + &mut self, + updated_entries: UpdatedEntriesSet, + is_auto_available: bool, + cx: &AppContext, + ) -> impl Future> { + let start = Instant::now(); + let backlogged; + let digest; + let needs_summary; + let summaries; + let persist; + + if is_auto_available { + let worktree = self.worktree.read(cx).snapshot(); + let worktree_abs_path = worktree.abs_path().clone(); + + backlogged = self.scan_updated_entries(worktree, updated_entries.clone(), cx); + digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx); + needs_summary = self.check_summary_cache(digest.files, cx); + summaries = self.summarize_files(needs_summary.files, cx); + persist = self.persist_summaries(summaries.files, cx); + } else { + // This feature is only staff-shipped, so make the rest of these no-ops. 
+ backlogged = Backlogged { + paths_to_digest: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + digest = MightNeedSummaryFiles { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + needs_summary = NeedsSummary { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + summaries = SummarizeFiles { + files: channel::unbounded().1, + task: Task::ready(Ok(())), + }; + persist = Task::ready(Ok(())); + } + + async move { + futures::try_join!( + backlogged.task, + digest.task, + needs_summary.task, + summaries.task, + persist + )?; + + log::info!("Summarizing updated entries took {:?}", start.elapsed()); + + Ok(()) + } + } + + fn check_summary_cache( + &self, + mut might_need_summary: channel::Receiver, + cx: &AppContext, + ) -> NeedsSummary { + let db_connection = self.db_connection.clone(); + let db = self.summary_db; + let (needs_summary_tx, needs_summary_rx) = channel::bounded(512); + let task = cx.background_executor().spawn(async move { + while let Some(file) = might_need_summary.next().await { + let tx = db_connection + .read_txn() + .context("Failed to create read transaction for checking which hashes are in summary cache")?; + + match db.get(&tx, &file.digest) { + Ok(opt_answer) => { + if opt_answer.is_none() { + // It's not in the summary cache db, so we need to summarize it. 
+ log::debug!("File {:?} (digest {:?}) was NOT in the db cache and needs to be resummarized.", file.path.display(), &file.digest); + needs_summary_tx.send(file).await?; + } else { + log::debug!("File {:?} (digest {:?}) was in the db cache and does not need to be resummarized.", file.path.display(), &file.digest); + } + } + Err(err) => { + log::error!("Reading from the summaries database failed: {:?}", err); + } + } + } + + Ok(()) + }); + + NeedsSummary { + files: needs_summary_rx, + task, + } + } + + fn scan_entries(&self, worktree: Snapshot, cx: &AppContext) -> Backlogged { + let (tx, rx) = channel::bounded(512); + let db_connection = self.db_connection.clone(); + let digest_db = self.file_digest_db; + let backlog = Arc::clone(&self.backlog); + let task = cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create read transaction")?; + + for entry in worktree.files(false, 0) { + let needs_summary = + Self::add_to_backlog(Arc::clone(&backlog), digest_db, &txn, entry); + + if !needs_summary.is_empty() { + tx.send(needs_summary).await?; + } + } + + // TODO delete db entries for deleted files + + Ok(()) + }); + + Backlogged { + paths_to_digest: rx, + task, + } + } + + fn add_to_backlog( + backlog: Arc>, + digest_db: heed::Database>, + txn: &RoTxn<'_>, + entry: &Entry, + ) -> Vec<(Arc, Option)> { + let entry_db_key = db_key_for_path(&entry.path); + + match digest_db.get(&txn, &entry_db_key) { + Ok(opt_saved_digest) => { + // The file path is the same, but the mtime is different. (Or there was no mtime.) + // It needs updating, so add it to the backlog! Then, if the backlog is full, drain it and summarize its contents. 
+ if entry.mtime != opt_saved_digest.and_then(|digest| digest.mtime) { + let mut backlog = backlog.lock(); + + log::info!( + "Inserting {:?} ({:?} bytes) into backlog", + &entry.path, + entry.size, + ); + backlog.insert(Arc::clone(&entry.path), entry.size, entry.mtime); + + if backlog.needs_drain() { + log::info!("Draining summary backlog..."); + return backlog.drain().collect(); + } + } + } + Err(err) => { + log::error!( + "Error trying to get file digest db entry {:?}: {:?}", + &entry_db_key, + err + ); + } + } + + Vec::new() + } + + fn scan_updated_entries( + &self, + worktree: Snapshot, + updated_entries: UpdatedEntriesSet, + cx: &AppContext, + ) -> Backlogged { + log::info!("Scanning for updated entries that might need summarization..."); + let (tx, rx) = channel::bounded(512); + // let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128); + let db_connection = self.db_connection.clone(); + let digest_db = self.file_digest_db; + let backlog = Arc::clone(&self.backlog); + let task = cx.background_executor().spawn(async move { + let txn = db_connection + .read_txn() + .context("failed to create read transaction")?; + + for (path, entry_id, status) in updated_entries.iter() { + match status { + project::PathChange::Loaded + | project::PathChange::Added + | project::PathChange::Updated + | project::PathChange::AddedOrUpdated => { + if let Some(entry) = worktree.entry_for_id(*entry_id) { + if entry.is_file() { + let needs_summary = Self::add_to_backlog( + Arc::clone(&backlog), + digest_db, + &txn, + entry, + ); + + if !needs_summary.is_empty() { + tx.send(needs_summary).await?; + } + } + } + } + project::PathChange::Removed => { + let _db_path = db_key_for_path(path); + // TODO delete db entries for deleted files + // deleted_entry_ranges_tx + // .send((Bound::Included(db_path.clone()), Bound::Included(db_path))) + // .await?; + } + } + } + + Ok(()) + }); + + Backlogged { + paths_to_digest: rx, + // deleted_entry_ranges: 
deleted_entry_ranges_rx, + task, + } + } + + fn digest_files( + &self, + paths: channel::Receiver, Option)>>, + worktree_abs_path: Arc, + cx: &AppContext, + ) -> MightNeedSummaryFiles { + let fs = self.fs.clone(); + let (rx, tx) = channel::bounded(2048); + let task = cx.spawn(|cx| async move { + cx.background_executor() + .scoped(|cx| { + for _ in 0..cx.num_cpus() { + cx.spawn(async { + while let Ok(pairs) = paths.recv().await { + // Note: we could process all these files concurrently if desired. Might or might not speed things up. + for (path, mtime) in pairs { + let entry_abs_path = worktree_abs_path.join(&path); + + // Load the file's contents and compute its hash digest. + let unsummarized_file = { + let Some(contents) = fs + .load(&entry_abs_path) + .await + .with_context(|| { + format!("failed to read path {entry_abs_path:?}") + }) + .log_err() + else { + continue; + }; + + let digest = { + let mut hasher = blake3::Hasher::new(); + // Incorporate both the (relative) file path as well as the contents of the file into the hash. + // This is because in some languages and frameworks, identical files can do different things + // depending on their paths (e.g. Rails controllers). It's also why we send the path to the model. 
+ hasher.update(path.display().to_string().as_bytes()); + hasher.update(contents.as_bytes()); + hasher.finalize().to_hex() + }; + + UnsummarizedFile { + digest, + contents, + path, + mtime, + } + }; + + if let Err(err) = rx + .send(unsummarized_file) + .map_err(|error| anyhow!(error)) + .await + { + log::error!("Error: {:?}", err); + + return; + } + } + } + }); + } + }) + .await; + Ok(()) + }); + + MightNeedSummaryFiles { files: tx, task } + } + + fn summarize_files( + &self, + mut unsummarized_files: channel::Receiver, + cx: &AppContext, + ) -> SummarizeFiles { + let (summarized_tx, summarized_rx) = channel::bounded(512); + let task = cx.spawn(|cx| async move { + while let Some(file) = unsummarized_files.next().await { + log::debug!("Summarizing {:?}", file); + let summary = cx + .update(|cx| Self::summarize_code(&file.contents, &file.path, cx))? + .await + .unwrap_or_else(|err| { + // Log a warning because we'll continue anyway. + // In the future, we may want to try splitting it up into multiple requests and concatenating the summaries, + // but this might give bad summaries due to cutting off source code files in the middle. + log::warn!("Failed to summarize {} - {:?}", file.path.display(), err); + + String::new() + }); + + // Note that the summary could be empty because of an error talking to a cloud provider, + // e.g. because the context limit was exceeded. In that case, we return Ok(String::new()). + if !summary.is_empty() { + summarized_tx + .send(SummarizedFile { + path: file.path.display().to_string(), + digest: file.digest, + summary, + mtime: file.mtime, + }) + .await? + } + } + + Ok(()) + }); + + SummarizeFiles { + files: summarized_rx, + task, + } + } + + fn summarize_code( + code: &str, + path: &Path, + cx: &AppContext, + ) -> impl Future> { + let start = Instant::now(); + let (summary_model_id, use_cache): (LanguageModelId, bool) = ( + "Qwen/Qwen2-7B-Instruct".to_string().into(), // TODO read this from the user's settings. 
+ false, // qwen2 doesn't have a cache, but we should probably infer this from the model + ); + let Some(model) = LanguageModelRegistry::read_global(cx) + .available_models(cx) + .find(|model| &model.id() == &summary_model_id) + else { + return cx.background_executor().spawn(async move { + Err(anyhow!("Couldn't find the preferred summarization model ({:?}) in the language registry's available models", summary_model_id)) + }); + }; + let utf8_path = path.to_string_lossy(); + const PROMPT_BEFORE_CODE: &str = "Summarize what the code in this file does in 3 sentences, using no newlines or bullet points in the summary:"; + let prompt = format!("{PROMPT_BEFORE_CODE}\n{utf8_path}:\n{code}"); + + log::debug!( + "Summarizing code by sending this prompt to {:?}: {:?}", + model.name(), + &prompt + ); + + let request = LanguageModelRequest { + messages: vec![LanguageModelRequestMessage { + role: Role::User, + content: vec![prompt.into()], + cache: use_cache, + }], + tools: Vec::new(), + stop: Vec::new(), + temperature: 1.0, + }; + + let code_len = code.len(); + cx.spawn(|cx| async move { + let stream = model.stream_completion(request, &cx); + cx.background_executor() + .spawn(async move { + let answer: String = stream + .await? + .filter_map(|event| async { + if let Ok(LanguageModelCompletionEvent::Text(text)) = event { + Some(text) + } else { + None + } + }) + .collect() + .await; + + log::info!( + "It took {:?} to summarize {:?} bytes of code.", + start.elapsed(), + code_len + ); + + log::debug!("Summary was: {:?}", &answer); + + Ok(answer) + }) + .await + + // TODO if summarization failed, put it back in the backlog! 
+ }) + } + + fn persist_summaries( + &self, + summaries: channel::Receiver, + cx: &AppContext, + ) -> Task> { + let db_connection = self.db_connection.clone(); + let digest_db = self.file_digest_db; + let summary_db = self.summary_db; + cx.background_executor().spawn(async move { + let mut summaries = summaries.chunks_timeout(4096, Duration::from_secs(2)); + while let Some(summaries) = summaries.next().await { + let mut txn = db_connection.write_txn()?; + for file in &summaries { + log::debug!( + "Saving summary of {:?} - which is {} bytes of summary for content digest {:?}", + &file.path, + file.summary.len(), + file.digest + ); + digest_db.put( + &mut txn, + &file.path, + &FileDigest { + mtime: file.mtime, + digest: file.digest, + }, + )?; + summary_db.put(&mut txn, &file.digest, &file.summary)?; + } + txn.commit()?; + + drop(summaries); + log::debug!("committed summaries"); + } + + Ok(()) + }) + } + + /// Empty out the backlog of files that haven't been resummarized, and resummarize them immediately. 
+ pub(crate) fn flush_backlog( + &self, + worktree_abs_path: Arc, + cx: &AppContext, + ) -> impl Future> { + let start = Instant::now(); + let backlogged = { + let (tx, rx) = channel::bounded(512); + let needs_summary: Vec<(Arc, Option)> = { + let mut backlog = self.backlog.lock(); + + backlog.drain().collect() + }; + + let task = cx.background_executor().spawn(async move { + tx.send(needs_summary).await?; + Ok(()) + }); + + Backlogged { + paths_to_digest: rx, + task, + } + }; + + let digest = self.digest_files(backlogged.paths_to_digest, worktree_abs_path, cx); + let needs_summary = self.check_summary_cache(digest.files, cx); + let summaries = self.summarize_files(needs_summary.files, cx); + let persist = self.persist_summaries(summaries.files, cx); + + async move { + futures::try_join!( + backlogged.task, + digest.task, + needs_summary.task, + summaries.task, + persist + )?; + + log::info!("Summarizing backlogged entries took {:?}", start.elapsed()); + + Ok(()) + } + } + + pub(crate) fn backlog_len(&self) -> usize { + self.backlog.lock().len() + } +} + +fn db_key_for_path(path: &Arc) -> String { + path.to_string_lossy().replace('/', "\0") +} diff --git a/crates/semantic_index/src/worktree_index.rs b/crates/semantic_index/src/worktree_index.rs new file mode 100644 index 00000000000000..7ca5a496196bd7 --- /dev/null +++ b/crates/semantic_index/src/worktree_index.rs @@ -0,0 +1,217 @@ +use crate::embedding::EmbeddingProvider; +use crate::embedding_index::EmbeddingIndex; +use crate::indexing::IndexingEntrySet; +use crate::summary_index::SummaryIndex; +use anyhow::Result; +use feature_flags::{AutoCommand, FeatureFlagAppExt}; +use fs::Fs; +use futures::future::Shared; +use gpui::{ + AppContext, AsyncAppContext, Context, Model, ModelContext, Subscription, Task, WeakModel, +}; +use language::LanguageRegistry; +use log; +use project::{UpdatedEntriesSet, Worktree}; +use smol::channel; +use std::sync::Arc; +use util::ResultExt; + +#[derive(Clone)] +pub enum 
WorktreeIndexHandle { + Loading { + index: Shared, Arc>>>, + }, + Loaded { + index: Model, + }, +} + +pub struct WorktreeIndex { + worktree: Model, + db_connection: heed::Env, + embedding_index: EmbeddingIndex, + summary_index: SummaryIndex, + entry_ids_being_indexed: Arc, + _index_entries: Task>, + _subscription: Subscription, +} + +impl WorktreeIndex { + pub fn load( + worktree: Model, + db_connection: heed::Env, + language_registry: Arc, + fs: Arc, + status_tx: channel::Sender<()>, + embedding_provider: Arc, + cx: &mut AppContext, + ) -> Task>> { + let worktree_for_index = worktree.clone(); + let worktree_for_summary = worktree.clone(); + let worktree_abs_path = worktree.read(cx).abs_path(); + let embedding_fs = Arc::clone(&fs); + let summary_fs = fs; + cx.spawn(|mut cx| async move { + let entries_being_indexed = Arc::new(IndexingEntrySet::new(status_tx)); + let (embedding_index, summary_index) = cx + .background_executor() + .spawn({ + let entries_being_indexed = Arc::clone(&entries_being_indexed); + let db_connection = db_connection.clone(); + async move { + let mut txn = db_connection.write_txn()?; + let embedding_index = { + let db_name = worktree_abs_path.to_string_lossy(); + let db = db_connection.create_database(&mut txn, Some(&db_name))?; + + EmbeddingIndex::new( + worktree_for_index, + embedding_fs, + db_connection.clone(), + db, + language_registry, + embedding_provider, + Arc::clone(&entries_being_indexed), + ) + }; + let summary_index = { + let file_digest_db = { + let db_name = + // Prepend something that wouldn't be found at the beginning of an + // absolute path, so we don't get db key namespace conflicts with + // embeddings, which use the abs path as a key. + format!("digests-{}", worktree_abs_path.to_string_lossy()); + db_connection.create_database(&mut txn, Some(&db_name))? 
+ }; + let summary_db = { + let db_name = + // Prepend something that wouldn't be found at the beginning of an + // absolute path, so we don't get db key namespace conflicts with + // embeddings, which use the abs path as a key. + format!("summaries-{}", worktree_abs_path.to_string_lossy()); + db_connection.create_database(&mut txn, Some(&db_name))? + }; + SummaryIndex::new( + worktree_for_summary, + summary_fs, + db_connection.clone(), + file_digest_db, + summary_db, + Arc::clone(&entries_being_indexed), + ) + }; + txn.commit()?; + anyhow::Ok((embedding_index, summary_index)) + } + }) + .await?; + + cx.new_model(|cx| { + Self::new( + worktree, + db_connection, + embedding_index, + summary_index, + entries_being_indexed, + cx, + ) + }) + }) + } + + #[allow(clippy::too_many_arguments)] + pub fn new( + worktree: Model, + db_connection: heed::Env, + embedding_index: EmbeddingIndex, + summary_index: SummaryIndex, + entry_ids_being_indexed: Arc, + cx: &mut ModelContext, + ) -> Self { + let (updated_entries_tx, updated_entries_rx) = channel::unbounded(); + let _subscription = cx.subscribe(&worktree, move |_this, _worktree, event, _cx| { + if let worktree::Event::UpdatedEntries(update) = event { + log::debug!("Updating entries..."); + _ = updated_entries_tx.try_send(update.clone()); + } + }); + + Self { + db_connection, + embedding_index, + summary_index, + worktree, + entry_ids_being_indexed, + _index_entries: cx.spawn(|this, cx| Self::index_entries(this, updated_entries_rx, cx)), + _subscription, + } + } + + pub fn entry_ids_being_indexed(&self) -> &IndexingEntrySet { + self.entry_ids_being_indexed.as_ref() + } + + pub fn worktree(&self) -> &Model { + &self.worktree + } + + pub fn db_connection(&self) -> &heed::Env { + &self.db_connection + } + + pub fn embedding_index(&self) -> &EmbeddingIndex { + &self.embedding_index + } + + pub fn summary_index(&self) -> &SummaryIndex { + &self.summary_index + } + + async fn index_entries( + this: WeakModel, + updated_entries: 
channel::Receiver, + mut cx: AsyncAppContext, + ) -> Result<()> { + let is_auto_available = cx.update(|cx| cx.wait_for_flag::())?.await; + let index = this.update(&mut cx, |this, cx| { + futures::future::try_join( + this.embedding_index.index_entries_changed_on_disk(cx), + this.summary_index + .index_entries_changed_on_disk(is_auto_available, cx), + ) + })?; + index.await.log_err(); + + while let Ok(updated_entries) = updated_entries.recv().await { + let is_auto_available = cx + .update(|cx| cx.has_flag::()) + .unwrap_or(false); + + let index = this.update(&mut cx, |this, cx| { + futures::future::try_join( + this.embedding_index + .index_updated_entries(updated_entries.clone(), cx), + this.summary_index.index_updated_entries( + updated_entries, + is_auto_available, + cx, + ), + ) + })?; + index.await.log_err(); + } + + Ok(()) + } + + #[cfg(test)] + pub fn path_count(&self) -> Result { + use anyhow::Context; + + let txn = self + .db_connection + .read_txn() + .context("failed to create read transaction")?; + Ok(self.embedding_index().db().len(&txn)?) + } +} diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index c6e64deb59d1b1..584524a1d7b6fe 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -3227,6 +3227,8 @@ pub struct Entry { pub git_status: Option, /// Whether this entry is considered to be a `.env` file. pub is_private: bool, + /// The entry's size on disk, in bytes. 
+ pub size: u64, pub char_bag: CharBag, pub is_fifo: bool, } @@ -3282,6 +3284,7 @@ impl Entry { path, inode: metadata.inode, mtime: Some(metadata.mtime), + size: metadata.len, canonical_path, is_symlink: metadata.is_symlink, is_ignored: false, @@ -5210,6 +5213,7 @@ impl<'a> From<&'a Entry> for proto::Entry { is_external: entry.is_external, git_status: entry.git_status.map(git_status_to_proto), is_fifo: entry.is_fifo, + size: Some(entry.size), } } } @@ -5231,6 +5235,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry { path, inode: entry.inode, mtime: entry.mtime.map(|time| time.into()), + size: entry.size.unwrap_or(0), canonical_path: None, is_ignored: entry.is_ignored, is_external: entry.is_external, From de344c833bf5b61641eb0057127d17ac830f74b4 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 13 Sep 2024 14:49:50 -0400 Subject: [PATCH 067/762] zed_extension_api: Use v0.2.0 WIT types (#17802) This PR makes `zed_extension_api` use the WIT types from v0.2.0 of extension API. A follow-up from #17795, since I had forgotten to do it there. Release Notes: - N/A --- crates/extension_api/src/extension_api.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/extension_api/src/extension_api.rs b/crates/extension_api/src/extension_api.rs index f66c6ef59f5f8c..f0b5baab9ab128 100644 --- a/crates/extension_api/src/extension_api.rs +++ b/crates/extension_api/src/extension_api.rs @@ -186,7 +186,7 @@ mod wit { wit_bindgen::generate!({ skip: ["init-extension"], - path: "./wit/since_v0.1.0", + path: "./wit/since_v0.2.0", }); } From 8f833ea0294448c8b7e9e6a09541fa9537867e65 Mon Sep 17 00:00:00 2001 From: Barry Penner <34104395+skytwosea@users.noreply.github.com> Date: Fri, 13 Sep 2024 14:51:14 -0400 Subject: [PATCH 068/762] Fix missing on-mouseup when dragging the window on Linux (#17801) Zed Hackathon entry :D Release Notes: - Fixed a bug where Zed would initiate a window move and then refuse to release the mouse. 
Co-authored-by: Mikayla --- crates/title_bar/src/title_bar.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index fd3f01e5f78bae..e2d45a923b7d06 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -182,6 +182,12 @@ impl Render for TitleBar { .on_mouse_down_out(cx.listener(move |this, _ev, _cx| { this.should_move = false; })) + .on_mouse_up( + gpui::MouseButton::Left, + cx.listener(move |this, _ev, _cx| { + this.should_move = false; + }), + ) .on_mouse_down( gpui::MouseButton::Left, cx.listener(move |this, _ev, _cx| { From adbe973f02b2e4cfe0645657ca10681a58f333a0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 13 Sep 2024 15:11:10 -0400 Subject: [PATCH 069/762] editor: In OpenFile check if file with path_suffix exists (#17805) Demo: https://github.com/user-attachments/assets/6acb6c1e-bb15-4205-9dcb-2aa4bb99dcf9 Release Notes: - When using `OpenFile` (`gf` in Vim mode) and the word under the cursor is not an existing file path, we now fall back and additionally check whether a file called `.` exists. That's similar to Vim's `suffixesadd` option. 
--------- Co-authored-by: Abdelhakim Qbaich Co-authored-by: Pete LeVasseur --- crates/editor/src/hover_links.rs | 69 +++++++++++++++++++++++++++----- crates/language/src/language.rs | 4 ++ crates/vim/src/command.rs | 19 +++++++++ 3 files changed, 83 insertions(+), 9 deletions(-) diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 86c17625e1ef92..3f590273df8e88 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -713,17 +713,42 @@ pub(crate) async fn find_file( cx: &mut AsyncWindowContext, ) -> Option<(Range, ResolvedPath)> { let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()).ok()?; - + let scope = snapshot.language_scope_at(position); let (range, candidate_file_path) = surrounding_filename(snapshot, position)?; - let existing_path = project - .update(cx, |project, cx| { - project.resolve_existing_file_path(&candidate_file_path, buffer, cx) - }) - .ok()? - .await?; + async fn check_path( + candidate_file_path: &str, + project: &Model, + buffer: &Model, + cx: &mut AsyncWindowContext, + ) -> Option { + project + .update(cx, |project, cx| { + project.resolve_existing_file_path(&candidate_file_path, buffer, cx) + }) + .ok()? + .await + } - Some((range, existing_path)) + if let Some(existing_path) = check_path(&candidate_file_path, &project, buffer, cx).await { + return Some((range, existing_path)); + } + + if let Some(scope) = scope { + for suffix in scope.path_suffixes() { + if candidate_file_path.ends_with(format!(".{suffix}").as_str()) { + continue; + } + + let suffixed_candidate = format!("{candidate_file_path}.{suffix}"); + if let Some(existing_path) = check_path(&suffixed_candidate, &project, buffer, cx).await + { + return Some((range, existing_path)); + } + } + } + + None } fn surrounding_filename( @@ -1490,7 +1515,8 @@ mod tests { You can't go to a file that does_not_exist.txt. Go to file2.rs if you want. Or go to ../dir/file2.rs if you want. 
- Or go to /root/dir/file2.rs if project is local.ˇ + Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file.ˇ "}); // File does not exist @@ -1499,6 +1525,7 @@ mod tests { Go to file2.rs if you want. Or go to ../dir/file2.rs if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); // No highlight @@ -1517,6 +1544,7 @@ mod tests { Go to fˇile2.rs if you want. Or go to ../dir/file2.rs if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); @@ -1525,6 +1553,7 @@ mod tests { Go to «file2.rsˇ» if you want. Or go to ../dir/file2.rs if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); // Moving the mouse over a relative path that does exist should highlight it @@ -1533,6 +1562,7 @@ mod tests { Go to file2.rs if you want. Or go to ../dir/fˇile2.rs if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); @@ -1541,6 +1571,7 @@ mod tests { Go to file2.rs if you want. Or go to «../dir/file2.rsˇ» if you want. Or go to /root/dir/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); // Moving the mouse over an absolute path that does exist should highlight it @@ -1549,6 +1580,7 @@ mod tests { Go to file2.rs if you want. Or go to ../dir/file2.rs if you want. Or go to /root/diˇr/file2.rs if project is local. + Or go to /root/dir/file2 if this is a Rust file. "}); cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); @@ -1557,6 +1589,25 @@ mod tests { Go to file2.rs if you want. Or go to ../dir/file2.rs if you want. 
Or go to «/root/dir/file2.rsˇ» if project is local. + Or go to /root/dir/file2 if this is a Rust file. + "}); + + // Moving the mouse over a path that exists, if we add the language-specific suffix, it should highlight it + let screen_coord = cx.pixel_position(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to /root/dir/file2.rs if project is local. + Or go to /root/diˇr/file2 if this is a Rust file. + "}); + + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + cx.assert_editor_text_highlights::(indoc! {" + You can't go to a file that does_not_exist.txt. + Go to file2.rs if you want. + Or go to ../dir/file2.rs if you want. + Or go to /root/dir/file2.rs if project is local. + Or go to «/root/dir/file2ˇ» if this is a Rust file. "}); cx.simulate_click(screen_coord, Modifiers::secondary_key()); diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index cd39490d0bbdf4..3112d88aa5543c 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1410,6 +1410,10 @@ impl Language { } impl LanguageScope { + pub fn path_suffixes(&self) -> &[String] { + &self.language.path_suffixes() + } + pub fn language_name(&self) -> LanguageName { self.language.config.name.clone() } diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 06c5f0bd3fc1ba..67a674afa6f127 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -969,6 +969,9 @@ mod test { fs.as_fake() .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec()) .await; + fs.as_fake() + .insert_file("/root/dir/file3.rs", "go to file3".as_bytes().to_vec()) + .await; // Put the path to the second file into the currently open buffer cx.set_state(indoc! 
{"go to fiˇle2.rs"}, Mode::Normal); @@ -981,5 +984,21 @@ mod test { cx.workspace(|workspace, cx| { assert_active_item(workspace, "/root/dir/file2.rs", "This is file2.rs", cx); }); + + // Update editor to point to `file2.rs` + cx.editor = cx.workspace(|workspace, cx| workspace.active_item_as::(cx).unwrap()); + + // Put the path to the third file into the currently open buffer, + // but remove its suffix, because we want that lookup to happen automatically. + cx.set_state(indoc! {"go to fiˇle3"}, Mode::Normal); + + // Go to file3.rs + cx.simulate_keystrokes("g f"); + + // We now have three items + cx.workspace(|workspace, cx| assert_eq!(workspace.items(cx).count(), 3)); + cx.workspace(|workspace, cx| { + assert_active_item(workspace, "/root/dir/file3.rs", "go to file3", cx); + }); } } From 1b36c62188884ae5fe5d5d3b07d36cb2ee730469 Mon Sep 17 00:00:00 2001 From: tepek2 Date: Fri, 13 Sep 2024 21:17:01 +0200 Subject: [PATCH 070/762] Add keybinding to swap pane items (#15583) - Rearrange tabs (left: `ctrl-shift-pageup`, right: `ctrl-shift-pagedown`) like Chrome Co-authored-by: Peter Tripp --- assets/keymaps/default-linux.json | 2 + assets/keymaps/default-macos.json | 2 + crates/workspace/src/pane.rs | 24 +++++++++ docs/src/key-bindings.md | 82 ++++++++++++++++--------------- 4 files changed, 70 insertions(+), 40 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 3c627d7803e1d5..bb5673dde6c178 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -245,6 +245,8 @@ "bindings": { "ctrl-pageup": "pane::ActivatePrevItem", "ctrl-pagedown": "pane::ActivateNextItem", + "ctrl-shift-pageup": "pane::SwapItemLeft", + "ctrl-shift-pagedown": "pane::SwapItemRight", "ctrl-w": "pane::CloseActiveItem", "ctrl-f4": "pane::CloseActiveItem", "alt-ctrl-t": "pane::CloseInactiveItems", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index ed6ece0556e038..9a0c08c3dcaddc 100644 
--- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -285,6 +285,8 @@ "cmd-}": "pane::ActivateNextItem", "alt-cmd-left": "pane::ActivatePrevItem", "alt-cmd-right": "pane::ActivateNextItem", + "ctrl-shift-pageup": "pane::SwapItemLeft", + "ctrl-shift-pagedown": "pane::SwapItemRight", "cmd-w": "pane::CloseActiveItem", "alt-cmd-t": "pane::CloseInactiveItems", "ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes", diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index d0fa411381a6d6..09b4683c0c5552 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -158,6 +158,8 @@ actions!( SplitDown, SplitHorizontal, SplitVertical, + SwapItemLeft, + SwapItemRight, TogglePreviewTab, TogglePinTab, ] @@ -1054,6 +1056,26 @@ impl Pane { self.activate_item(index, activate_pane, activate_pane, cx); } + pub fn swap_item_left(&mut self, cx: &mut ViewContext) { + let index = self.active_item_index; + if index == 0 { + return; + } + + self.items.swap(index, index - 1); + self.activate_item(index - 1, true, true, cx); + } + + pub fn swap_item_right(&mut self, cx: &mut ViewContext) { + let index = self.active_item_index; + if index + 1 == self.items.len() { + return; + } + + self.items.swap(index, index + 1); + self.activate_item(index + 1, true, true, cx); + } + pub fn close_active_item( &mut self, action: &CloseActiveItem, @@ -2574,6 +2596,8 @@ impl Render for Pane { .on_action(cx.listener(|pane: &mut Pane, _: &ActivateNextItem, cx| { pane.activate_next_item(true, cx); })) + .on_action(cx.listener(|pane, _: &SwapItemLeft, cx| pane.swap_item_left(cx))) + .on_action(cx.listener(|pane, _: &SwapItemRight, cx| pane.swap_item_right(cx))) .on_action(cx.listener(|pane, action, cx| { pane.toggle_pin_tab(action, cx); })) diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 2a97bc62a89178..989e101e7d9fbd 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md @@ -405,46 +405,48 @@ TBD: Add 
Column with Linux shortcuts #### Pane -| **Command** | **Target** | **Default Shortcut** | -| ----------------------------- | -------------- | ----------------------- | -| Activate item 1 | Pane | `Control + 1` | -| Activate item 2 | Pane | `Control + 2` | -| Activate item 3 | Pane | `Control + 3` | -| Activate item 4 | Pane | `Control + 4` | -| Activate item 5 | Pane | `Control + 5` | -| Activate item 6 | Pane | `Control + 6` | -| Activate item 7 | Pane | `Control + 7` | -| Activate item 8 | Pane | `Control + 8` | -| Activate item 9 | Pane | `Control + 9` | -| Activate last item | Pane | `Control + 0` | -| Activate next item | Pane | `Alt + ⌘ + Right` | -| Activate next item | Pane | `⌘ + }` | -| Activate prev item | Pane | `Alt + ⌘ + Left` | -| Activate prev item | Pane | `⌘ + {` | -| Close active item | Pane | `⌘ + W` | -| Close all items | Pane | `⌘ + K, ⌘ + W` | -| Close clean items | Pane | `⌘ + K, U` | -| Close inactive items | Pane | `Alt + ⌘ + T` | -| Go back | Pane | `Control + -` | -| Go forward | Pane | `Control + Shift + _` | -| Reopen closed item | Pane | `⌘ + Shift + T` | -| Split down | Pane | `⌘ + K, Down` | -| Split left | Pane | `⌘ + K, Left` | -| Split right | Pane | `⌘ + K, Right` | -| Split up | Pane | `⌘ + K, Up` | -| Toggle filters | Project Search | `Alt + ⌘ + F` | -| Toggle focus | Project Search | `⌘ + F` | -| Toggle focus | Project Search | `⌘ + Shift + F` | -| Activate regex mode | Search | `Alt + ⌘ + G` | -| Activate text mode | Search | `Alt + ⌘ + X` | -| Cycle mode | Search | `Alt + Tab` | -| Select all matches | Search | `Alt + Enter` | -| Select next match | Search | `⌘ + G` | -| Select prev match | Search | `⌘ + Shift + G` | -| Toggle case sensitive | Search | `Alt + ⌘ + C` | -| Toggle replace | Search | `⌘ + Shift + H` | -| Toggle whole word | Search | `Alt + ⌘ + W` | -| Close inactive tabs and panes | Workspace | `Control + Alt + ⌘ + W` | +| **Command** | **Target** | **Default Shortcut** | +| ----------------------------- | 
-------------- | ----------------------------- | +| Activate item 1 | Pane | `Control + 1` | +| Activate item 2 | Pane | `Control + 2` | +| Activate item 3 | Pane | `Control + 3` | +| Activate item 4 | Pane | `Control + 4` | +| Activate item 5 | Pane | `Control + 5` | +| Activate item 6 | Pane | `Control + 6` | +| Activate item 7 | Pane | `Control + 7` | +| Activate item 8 | Pane | `Control + 8` | +| Activate item 9 | Pane | `Control + 9` | +| Activate last item | Pane | `Control + 0` | +| Activate next item | Pane | `Alt + ⌘ + Right` | +| Activate next item | Pane | `⌘ + }` | +| Activate prev item | Pane | `Alt + ⌘ + Left` | +| Activate prev item | Pane | `⌘ + {` | +| Swap item to left | Pane | `Control + Shift + Page Up` | +| Swap item to right | Pane | `Control + Shift + Page Down` | +| Close active item | Pane | `⌘ + W` | +| Close all items | Pane | `⌘ + K, ⌘ + W` | +| Close clean items | Pane | `⌘ + K, U` | +| Close inactive items | Pane | `Alt + ⌘ + T` | +| Go back | Pane | `Control + -` | +| Go forward | Pane | `Control + Shift + _` | +| Reopen closed item | Pane | `⌘ + Shift + T` | +| Split down | Pane | `⌘ + K, Down` | +| Split left | Pane | `⌘ + K, Left` | +| Split right | Pane | `⌘ + K, Right` | +| Split up | Pane | `⌘ + K, Up` | +| Toggle filters | Project Search | `Alt + ⌘ + F` | +| Toggle focus | Project Search | `⌘ + F` | +| Toggle focus | Project Search | `⌘ + Shift + F` | +| Activate regex mode | Search | `Alt + ⌘ + G` | +| Activate text mode | Search | `Alt + ⌘ + X` | +| Cycle mode | Search | `Alt + Tab` | +| Select all matches | Search | `Alt + Enter` | +| Select next match | Search | `⌘ + G` | +| Select prev match | Search | `⌘ + Shift + G` | +| Toggle case sensitive | Search | `Alt + ⌘ + C` | +| Toggle replace | Search | `⌘ + Shift + H` | +| Toggle whole word | Search | `Alt + ⌘ + W` | +| Close inactive tabs and panes | Workspace | `Control + Alt + ⌘ + W` | #### Buffer Search Bar From c71f052276d57c93358b700d8022b5c8f1338289 Mon Sep 17 00:00:00 
2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Fri, 13 Sep 2024 15:42:15 -0400 Subject: [PATCH 071/762] Add ability to use o1-preview and o1-mini as custom models (#17804) This is a barebones modification of the OpenAI provider code to accommodate non-streaming completions. This is specifically for the o1 models, which do not support streaming. Tested that this is working by running a `/workflow` with the following (arbitrarily chosen) settings: ```json { "language_models": { "openai": { "version": "1", "available_models": [ { "name": "o1-preview", "display_name": "o1-preview", "max_tokens": 128000, "max_completion_tokens": 30000 }, { "name": "o1-mini", "display_name": "o1-mini", "max_tokens": 128000, "max_completion_tokens": 20000 } ] } }, } ``` Release Notes: - Changed `low_speed_timeout_in_seconds` option to `600` for OpenAI provider to accommodate recent o1 model release. --------- Co-authored-by: Peter Co-authored-by: Bennet Co-authored-by: Marshall Bowers --- assets/settings/default.json | 3 +- crates/assistant/src/assistant_settings.rs | 2 + crates/assistant/src/inline_assistant.rs | 2 +- crates/language_model/src/provider/cloud.rs | 3 + crates/language_model/src/provider/open_ai.rs | 2 + crates/language_model/src/settings.rs | 2 + crates/open_ai/src/open_ai.rs | 126 +++++++++++++++++- 7 files changed, 136 insertions(+), 4 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 65254afb7cf72c..22dafb2890fb5a 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -916,7 +916,8 @@ }, "openai": { "version": "1", - "api_url": "https://api.openai.com/v1" + "api_url": "https://api.openai.com/v1", + "low_speed_timeout_in_seconds": 600 } }, // Zed's Prettier integration settings. 
diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index 7939eacd9344a3..e2c6a8eb24f088 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -163,11 +163,13 @@ impl AssistantSettingsContent { display_name, max_tokens, max_output_tokens, + max_completion_tokens: None, } => Some(open_ai::AvailableModel { name, display_name, max_tokens, max_output_tokens, + max_completion_tokens: None, }), _ => None, }) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 246a408477bb6e..b01a712a7e4e40 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -2407,7 +2407,7 @@ impl Codegen { Ok(LanguageModelRequest { messages, tools: Vec::new(), - stop: vec!["|END|>".to_string()], + stop: Vec::new(), temperature: 1., }) } diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 0de7fb3feb49e2..f8f64ff3b84988 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -78,6 +78,8 @@ pub struct AvailableModel { pub max_tokens: usize, /// The maximum number of output tokens allowed by the model. pub max_output_tokens: Option, + /// The maximum number of completion tokens allowed by the model (o1-* only) + pub max_completion_tokens: Option, /// Override this model with a different Anthropic model for tool calls. pub tool_override: Option, /// Indicates whether this custom model supports caching. 
@@ -257,6 +259,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider { display_name: model.display_name.clone(), max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, + max_completion_tokens: model.max_completion_tokens, }), AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom { name: model.name.clone(), diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index fe5e60caec8a95..98424a23aad8fe 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -43,6 +43,7 @@ pub struct AvailableModel { pub display_name: Option, pub max_tokens: usize, pub max_output_tokens: Option, + pub max_completion_tokens: Option, } pub struct OpenAiLanguageModelProvider { @@ -175,6 +176,7 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider { display_name: model.display_name.clone(), max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, + max_completion_tokens: model.max_completion_tokens, }, ); } diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 0059ed56c4c63b..80749c0bdb3736 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -178,11 +178,13 @@ impl OpenAiSettingsContent { display_name, max_tokens, max_output_tokens, + max_completion_tokens, } => Some(provider::open_ai::AvailableModel { name, max_tokens, max_output_tokens, display_name, + max_completion_tokens, }), _ => None, }) diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 5b621d6bb844c3..7b0294bd9c0835 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -1,12 +1,21 @@ mod supported_countries; use anyhow::{anyhow, Context, Result}; -use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt}; +use futures::{ + io::BufReader, + stream::{self, BoxStream}, + 
AsyncBufReadExt, AsyncReadExt, Stream, StreamExt, +}; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; use isahc::config::Configurable; use serde::{Deserialize, Serialize}; use serde_json::Value; -use std::{convert::TryFrom, future::Future, pin::Pin, time::Duration}; +use std::{ + convert::TryFrom, + future::{self, Future}, + pin::Pin, + time::Duration, +}; use strum::EnumIter; pub use supported_countries::*; @@ -72,6 +81,7 @@ pub enum Model { display_name: Option, max_tokens: usize, max_output_tokens: Option, + max_completion_tokens: Option, }, } @@ -139,6 +149,7 @@ pub struct Request { pub stream: bool, #[serde(default, skip_serializing_if = "Option::is_none")] pub max_tokens: Option, + #[serde(default, skip_serializing_if = "Vec::is_empty")] pub stop: Vec, pub temperature: f32, #[serde(default, skip_serializing_if = "Option::is_none")] @@ -263,6 +274,111 @@ pub struct ResponseStreamEvent { pub usage: Option, } +#[derive(Serialize, Deserialize, Debug)] +pub struct Response { + pub id: String, + pub object: String, + pub created: u64, + pub model: String, + pub choices: Vec, + pub usage: Usage, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct Choice { + pub index: u32, + pub message: RequestMessage, + pub finish_reason: Option, +} + +pub async fn complete( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + request: Request, + low_speed_timeout: Option, +) -> Result { + let uri = format!("{api_url}/chat/completions"); + let mut request_builder = HttpRequest::builder() + .method(Method::POST) + .uri(uri) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", api_key)); + if let Some(low_speed_timeout) = low_speed_timeout { + request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + }; + + let mut request_body = request; + request_body.stream = false; + + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request_body)?))?; + let mut 
response = client.send(request).await?; + + if response.status().is_success() { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + let response: Response = serde_json::from_str(&body)?; + Ok(response) + } else { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + + #[derive(Deserialize)] + struct OpenAiResponse { + error: OpenAiError, + } + + #[derive(Deserialize)] + struct OpenAiError { + message: String, + } + + match serde_json::from_str::(&body) { + Ok(response) if !response.error.message.is_empty() => Err(anyhow!( + "Failed to connect to OpenAI API: {}", + response.error.message, + )), + + _ => Err(anyhow!( + "Failed to connect to OpenAI API: {} {}", + response.status(), + body, + )), + } + } +} + +fn adapt_response_to_stream(response: Response) -> ResponseStreamEvent { + ResponseStreamEvent { + created: response.created as u32, + model: response.model, + choices: response + .choices + .into_iter() + .map(|choice| ChoiceDelta { + index: choice.index, + delta: ResponseMessageDelta { + role: Some(match choice.message { + RequestMessage::Assistant { .. } => Role::Assistant, + RequestMessage::User { .. } => Role::User, + RequestMessage::System { .. } => Role::System, + RequestMessage::Tool { .. } => Role::Tool, + }), + content: match choice.message { + RequestMessage::Assistant { content, .. } => content, + RequestMessage::User { content } => Some(content), + RequestMessage::System { content } => Some(content), + RequestMessage::Tool { content, .. 
} => Some(content), + }, + tool_calls: None, + }, + finish_reason: choice.finish_reason, + }) + .collect(), + usage: Some(response.usage), + } +} + pub async fn stream_completion( client: &dyn HttpClient, api_url: &str, @@ -270,6 +386,12 @@ pub async fn stream_completion( request: Request, low_speed_timeout: Option, ) -> Result>> { + if request.model == "o1-preview" || request.model == "o1-mini" { + let response = complete(client, api_url, api_key, request, low_speed_timeout).await; + let response_stream_event = response.map(adapt_response_to_stream); + return Ok(stream::once(future::ready(response_stream_event)).boxed()); + } + let uri = format!("{api_url}/chat/completions"); let mut request_builder = HttpRequest::builder() .method(Method::POST) From e145c13f731269bd361dc64602bc9fddf61684bf Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 13 Sep 2024 16:05:34 -0400 Subject: [PATCH 072/762] Add stray UI polish to the SSH flow (#17798) Some super subtle refinement opportunities I spotted while playing around with this flow. There are mostly copywriting tweaks and some UI tweaks here and there (including editing the modal horizontal padding). 
--- Release Notes: - N/A --- crates/recent_projects/src/dev_servers.rs | 32 +++++++++++------------ crates/ui/src/components/modal.rs | 9 +++---- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index d8b10f31f9f55e..491f378f30ce64 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -929,7 +929,7 @@ impl DevServerProjects { .on_click( cx.listener(move |this, _, cx| this.delete_ssh_server(ix, cx)), ) - .tooltip(|cx| Tooltip::text("Remove dev server", cx)) + .tooltip(|cx| Tooltip::text("Remove Dev Server", cx)) })), ), ) @@ -1162,9 +1162,10 @@ impl DevServerProjects { }) }); - const MANUAL_SETUP_MESSAGE: &str = "Click create to generate a token for this server. The next step will provide instructions for setting zed up on that machine."; + const MANUAL_SETUP_MESSAGE: &str = + "Generate a token for this server and follow the steps to set Zed up on that machine."; const SSH_SETUP_MESSAGE: &str = - "Enter the command you use to ssh into this server.\nFor example: `ssh me@my.server` or `ssh me@secret-box:2222`."; + "Enter the command you use to SSH into this server.\nFor example: `ssh me@my.server` or `ssh me@secret-box:2222`."; Modal::new("create-dev-server", Some(self.scroll_handle.clone())) .header( @@ -1191,6 +1192,7 @@ impl DevServerProjects { .child( v_flex() .w_full() + .px_2() .gap_y(Spacing::Large.rems(cx)) .when(ssh_prompt.is_none(), |el| { el.child( @@ -1346,9 +1348,9 @@ impl DevServerProjects { ) -> Div { self.markdown.update(cx, |markdown, cx| { if kind == NewServerKind::Manual { - markdown.reset(format!("Please log into '{}'. If you don't yet have zed installed, run:\n```\ncurl https://zed.dev/install.sh | bash\n```\nThen to start zed in headless mode:\n```\nzed --dev-server-token {}\n```", dev_server_name, access_token), cx); + markdown.reset(format!("Please log into '{}'. 
If you don't yet have Zed installed, run:\n```\ncurl https://zed.dev/install.sh | bash\n```\nThen, to start Zed in headless mode:\n```\nzed --dev-server-token {}\n```", dev_server_name, access_token), cx); } else { - markdown.reset("Please wait while we connect over SSH.\n\nIf you run into problems, please [file a bug](https://github.com/zed-industries/zed), and in the meantime try using manual setup.".to_string(), cx); + markdown.reset("Please wait while we connect over SSH.\n\nIf you run into problems, please [file a bug](https://github.com/zed-industries/zed), and in the meantime try using the manual setup.".to_string(), cx); } }); @@ -1420,15 +1422,14 @@ impl DevServerProjects { ) .when(is_signed_out, |modal| { modal - .section(Section::new().child(v_flex().mb_4().child(Label::new( - "You are not currently signed in to Zed. Currently the remote development features are only available to signed in users. Please sign in to continue.", + .section(Section::new().child(div().child(Label::new( + "To continue with the remote development features, you need to sign in to Zed.", )))) .footer( ModalFooter::new().end_slot( - Button::new("sign_in", "Sign in") + Button::new("sign_in", "Sign in with GitHub") .icon(IconName::Github) .icon_position(IconPosition::Start) - .style(ButtonStyle::Filled) .full_width() .on_click(cx.listener(|_, _, cx| { let client = Client::global(cx).clone(); @@ -1447,17 +1448,15 @@ impl DevServerProjects { .when(!is_signed_out, |modal| { modal.section( Section::new().child( - div().mb_4().child( + div().child( List::new() - .empty_message("No dev servers registered.") + .empty_message("No dev servers registered yet.") .header(Some( ListHeader::new("Connections").end_slot( - Button::new("register-dev-server-button", "Connect") + Button::new("register-dev-server-button", "Connect New Server") .icon(IconName::Plus) .icon_position(IconPosition::Start) - .tooltip(|cx| { - Tooltip::text("Connect to a new server", cx) - }) + .icon_color(Color::Muted) 
.on_click(cx.listener(|this, _, cx| { this.mode = Mode::CreateDevServer( CreateDevServer { @@ -1524,6 +1523,7 @@ impl Render for DevServerProjects { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { div() .track_focus(&self.focus_handle) + .p_2() .elevation_3(cx) .key_context("DevServerModal") .on_action(cx.listener(Self::cancel)) @@ -1590,7 +1590,7 @@ pub fn reconnect_to_dev_server( cx: &mut WindowContext, ) -> Task> { let Some(ssh_connection_string) = dev_server.ssh_connection_string else { - return Task::ready(Err(anyhow!("can't reconnect, no ssh_connection_string"))); + return Task::ready(Err(anyhow!("Can't reconnect, no ssh_connection_string"))); }; let dev_server_store = dev_server_projects::Store::global(cx); let get_access_token = dev_server_store.update(cx, |store, cx| { diff --git a/crates/ui/src/components/modal.rs b/crates/ui/src/components/modal.rs index de85ee9dafa3ee..dec7a14a52fe5e 100644 --- a/crates/ui/src/components/modal.rs +++ b/crates/ui/src/components/modal.rs @@ -1,6 +1,6 @@ use crate::{ - h_flex, rems_from_px, v_flex, Clickable, Color, Headline, HeadlineSize, IconButton, - IconButtonShape, IconName, Label, LabelCommon, LabelSize, Spacing, + h_flex, v_flex, Clickable, Color, Headline, HeadlineSize, IconButton, IconButtonShape, + IconName, Label, LabelCommon, LabelSize, Spacing, }; use gpui::{prelude::FluentBuilder, *}; use smallvec::SmallVec; @@ -210,7 +210,7 @@ impl ParentElement for ModalRow { impl RenderOnce for ModalRow { fn render(self, _cx: &mut WindowContext) -> impl IntoElement { - h_flex().w_full().px_2().py_1().children(self.children) + h_flex().w_full().py_1().children(self.children) } } @@ -326,7 +326,6 @@ impl RenderOnce for Section { .border_color(cx.theme().colors().border) .bg(section_bg) .py(Spacing::Medium.rems(cx)) - .px(Spacing::Large.rems(cx) - rems_from_px(1.0)) .gap_y(Spacing::Small.rems(cx)) .child(div().flex().flex_1().size_full().children(self.children)), ) @@ -334,7 +333,7 @@ impl RenderOnce for 
Section { v_flex() .w_full() .gap_y(Spacing::Small.rems(cx)) - .px(Spacing::Large.rems(cx) + Spacing::Large.rems(cx)) + .px(Spacing::Medium.rems(cx) + Spacing::Medium.rems(cx)) .children(self.children) }; From d245f5e75cb095444cbe582ee7d4765e551faa99 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 13 Sep 2024 16:23:55 -0400 Subject: [PATCH 073/762] OpenAI o1-preview and o1-mini support (#17796) Release Notes: - Added support for OpenAI o1-mini and o1-preview models. --------- Co-authored-by: Jason Mancuso <7891333+jvmncs@users.noreply.github.com> Co-authored-by: Bennet --- .../language_model/src/model/cloud_model.rs | 2 ++ crates/language_model/src/provider/open_ai.rs | 11 +++++--- crates/language_model/src/request.rs | 3 ++- crates/open_ai/src/open_ai.rs | 27 ++++++++++++++----- docs/src/assistant/configuration.md | 10 +++++-- 5 files changed, 39 insertions(+), 14 deletions(-) diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index be0812eab90e70..2ce48931f6d4db 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -102,6 +102,8 @@ impl CloudModel { | open_ai::Model::FourTurbo | open_ai::Model::FourOmni | open_ai::Model::FourOmniMini + | open_ai::Model::O1Mini + | open_ai::Model::O1Preview | open_ai::Model::Custom { .. } => { LanguageModelAvailability::RequiresPlan(Plan::ZedPro) } diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 98424a23aad8fe..222c1530412aab 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -372,10 +372,13 @@ pub fn count_open_ai_tokens( }) .collect::>(); - if let open_ai::Model::Custom { .. } = model { - tiktoken_rs::num_tokens_from_messages("gpt-4", &messages) - } else { - tiktoken_rs::num_tokens_from_messages(model.id(), &messages) + match model { + open_ai::Model::Custom { .. 
} + | open_ai::Model::O1Mini + | open_ai::Model::O1Preview => { + tiktoken_rs::num_tokens_from_messages("gpt-4", &messages) + } + _ => tiktoken_rs::num_tokens_from_messages(model.id(), &messages), } }) .boxed() diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 4162e9df87037a..dd480b8aaf38c2 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -241,6 +241,7 @@ pub struct LanguageModelRequest { impl LanguageModelRequest { pub fn into_open_ai(self, model: String, max_output_tokens: Option) -> open_ai::Request { + let stream = !model.starts_with("o1-"); open_ai::Request { model, messages: self @@ -259,7 +260,7 @@ impl LanguageModelRequest { }, }) .collect(), - stream: true, + stream, stop: self.stop, temperature: self.temperature, max_tokens: max_output_tokens, diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 7b0294bd9c0835..e67fe1af27cdb8 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -63,17 +63,22 @@ impl From for String { #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] pub enum Model { - #[serde(rename = "gpt-3.5-turbo", alias = "gpt-3.5-turbo-0613")] + #[serde(rename = "gpt-3.5-turbo", alias = "gpt-3.5-turbo")] ThreePointFiveTurbo, - #[serde(rename = "gpt-4", alias = "gpt-4-0613")] + #[serde(rename = "gpt-4", alias = "gpt-4")] Four, - #[serde(rename = "gpt-4-turbo-preview", alias = "gpt-4-1106-preview")] + #[serde(rename = "gpt-4-turbo", alias = "gpt-4-turbo")] FourTurbo, - #[serde(rename = "gpt-4o", alias = "gpt-4o-2024-05-13")] + #[serde(rename = "gpt-4o", alias = "gpt-4o")] #[default] FourOmni, - #[serde(rename = "gpt-4o-mini", alias = "gpt-4o-mini-2024-07-18")] + #[serde(rename = "gpt-4o-mini", alias = "gpt-4o-mini")] FourOmniMini, + #[serde(rename = "o1-preview", alias = "o1-preview")] + O1Preview, + #[serde(rename 
= "o1-mini", alias = "o1-mini")] + O1Mini, + #[serde(rename = "custom")] Custom { name: String, @@ -93,6 +98,8 @@ impl Model { "gpt-4-turbo-preview" => Ok(Self::FourTurbo), "gpt-4o" => Ok(Self::FourOmni), "gpt-4o-mini" => Ok(Self::FourOmniMini), + "o1-preview" => Ok(Self::O1Preview), + "o1-mini" => Ok(Self::O1Mini), _ => Err(anyhow!("invalid model id")), } } @@ -101,9 +108,11 @@ impl Model { match self { Self::ThreePointFiveTurbo => "gpt-3.5-turbo", Self::Four => "gpt-4", - Self::FourTurbo => "gpt-4-turbo-preview", + Self::FourTurbo => "gpt-4-turbo", Self::FourOmni => "gpt-4o", Self::FourOmniMini => "gpt-4o-mini", + Self::O1Preview => "o1-preview", + Self::O1Mini => "o1-mini", Self::Custom { name, .. } => name, } } @@ -115,6 +124,8 @@ impl Model { Self::FourTurbo => "gpt-4-turbo", Self::FourOmni => "gpt-4o", Self::FourOmniMini => "gpt-4o-mini", + Self::O1Preview => "o1-preview", + Self::O1Mini => "o1-mini", Self::Custom { name, display_name, .. } => display_name.as_ref().unwrap_or(name), @@ -123,11 +134,13 @@ impl Model { pub fn max_token_count(&self) -> usize { match self { - Self::ThreePointFiveTurbo => 4096, + Self::ThreePointFiveTurbo => 16385, Self::Four => 8192, Self::FourTurbo => 128000, Self::FourOmni => 128000, Self::FourOmniMini => 128000, + Self::O1Preview => 128000, + Self::O1Mini => 128000, Self::Custom { max_tokens, .. } => *max_tokens, } } diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index 0fd242c6191b4d..4d9870e8960a0a 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -165,7 +165,7 @@ Zed will also use the `OPENAI_API_KEY` environment variable if it's defined. #### OpenAI Custom Models {#openai-custom-models} -The Zed Assistant comes pre-configured to use the latest version for common models (GPT-3.5 Turbo, GPT-4, GPT-4 Turbo, GPT-4o, GPT-4o mini). 
If you wish to use alternate models, perhaps a preview release or a dated model release, you can do so by adding the following to your Zed `settings.json`: +The Zed Assistant comes pre-configured to use the latest version for common models (GPT-3.5 Turbo, GPT-4, GPT-4 Turbo, GPT-4o, GPT-4o mini). If you wish to use alternate models, perhaps a preview release or a dated model release or you wish to control the request parameters you can do so by adding the following to your Zed `settings.json`: ```json { @@ -176,6 +176,12 @@ The Zed Assistant comes pre-configured to use the latest version for common mode "provider": "openai", "name": "gpt-4o-2024-08-06", "max_tokens": 128000 + }, + { + "name": "o1-mini", + "display_name": "o1-mini", + "max_tokens": 128000, + "max_completion_tokens": 20000 } ] } @@ -183,7 +189,7 @@ The Zed Assistant comes pre-configured to use the latest version for common mode } ``` -You must provide the model's Context Window in the `max_tokens` parameter, this can be found [OpenAI Model Docs](https://platform.openai.com/docs/models). Custom models will be listed in the model dropdown in the assistant panel. +You must provide the model's Context Window in the `max_tokens` parameter, this can be found [OpenAI Model Docs](https://platform.openai.com/docs/models). OpenAI `o1` models should set `max_completion_tokens` as well to avoid incurring high reasoning token costs. Custom models will be listed in the model dropdown in the assistant panel. ### Advanced configuration {#advanced-configuration} From fac9ee5f861b501b4ca79ab04d410eeb49d9ec5e Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Fri, 13 Sep 2024 16:45:16 -0400 Subject: [PATCH 074/762] Add `ui_macros` crate & `DerivePathStr` derive macro (#17811) This PR adds the `ui_macros` crate to allow building supporting macros for the `ui` crate. Additionally, it implements the `DerivePathStr` derive macro and the `path_str` attribute macro. 
These macros work together to generate a `path` method for enum variants, which is useful for creating standardized string representations of enum variants. The `DerivePathStr` macro provides the following functionality: - Generates a `path` method for each enum variant. - Allows specifying a prefix (required) and suffix (optional) for all paths. - Supports `strum` attributes for case conversion (e.g., snake_case, lowercase). Usage example: ```rust #[derive(DerivePathStr)] #[path_str(prefix = "my_prefix", suffix = ".txt")] #[strum(serialize_all = "snake_case")] enum MyEnum { VariantOne, VariantTwo, } // Generated paths: // MyEnum::VariantOne.path() -> "my_prefix/variant_one.txt" // MyEnum::VariantTwo.path() -> "my_prefix/variant_two.txt" ``` In a later PR this will be used to automate the creation of icon & image paths in the `ui` crate. This gives the following benefits: 1. Ensures standard naming of assets as paths are not manually specified. 2. Makes adding new enum variants less tedious and error-prone. 3. Quickly catches missing or incorrect paths during compilation. 4. Adds a building block towards being able to lint for unused assets in the future.
Release Notes: - N/A --- Cargo.lock | 11 +++ Cargo.toml | 3 + crates/editor/Cargo.toml | 2 +- crates/ui/Cargo.toml | 1 + crates/ui/src/path_str.rs | 33 ++++++++ crates/ui/src/ui.rs | 1 + crates/ui_macros/Cargo.toml | 19 +++++ crates/ui_macros/LICENSE-GPL | 1 + crates/ui_macros/src/derive_path_str.rs | 105 ++++++++++++++++++++++++ crates/ui_macros/src/ui_macros.rs | 53 ++++++++++++ 10 files changed, 228 insertions(+), 1 deletion(-) create mode 100644 crates/ui/src/path_str.rs create mode 100644 crates/ui_macros/Cargo.toml create mode 120000 crates/ui_macros/LICENSE-GPL create mode 100644 crates/ui_macros/src/derive_path_str.rs create mode 100644 crates/ui_macros/src/ui_macros.rs diff --git a/Cargo.lock b/Cargo.lock index 793cb66ad77b07..79f4e803a8461a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12273,6 +12273,7 @@ dependencies = [ "story", "strum 0.25.0", "theme", + "ui_macros", "windows 0.58.0", ] @@ -12287,6 +12288,16 @@ dependencies = [ "ui", ] +[[package]] +name = "ui_macros" +version = "0.1.0" +dependencies = [ + "convert_case 0.6.0", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "unicase" version = "2.7.0" diff --git a/Cargo.toml b/Cargo.toml index 53109002fa1139..726ffe0cca3782 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -118,6 +118,7 @@ members = [ "crates/title_bar", "crates/ui", "crates/ui_input", + "crates/ui_macros", "crates/util", "crates/vcs_menu", "crates/vim", @@ -292,6 +293,7 @@ time_format = { path = "crates/time_format" } title_bar = { path = "crates/title_bar" } ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } +ui_macros = { path = "crates/ui_macros" } util = { path = "crates/util" } vcs_menu = { path = "crates/vcs_menu" } vim = { path = "crates/vim" } @@ -333,6 +335,7 @@ chrono = { version = "0.4", features = ["serde"] } clap = { version = "4.4", features = ["derive"] } clickhouse = "0.11.6" cocoa = "0.26" +convert_case = "0.6.0" core-foundation = "0.9.3" core-foundation-sys = "0.8.6" ctor = "0.2.6" 
diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 324201b41ebaa0..b1cc59ace60322 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -35,7 +35,7 @@ chrono.workspace = true client.workspace = true clock.workspace = true collections.workspace = true -convert_case = "0.6.0" +convert_case.workspace = true db.workspace = true emojis.workspace = true file_icons.workspace = true diff --git a/crates/ui/Cargo.toml b/crates/ui/Cargo.toml index 71e67cb1842a43..594814ae2a4b27 100644 --- a/crates/ui/Cargo.toml +++ b/crates/ui/Cargo.toml @@ -23,6 +23,7 @@ smallvec.workspace = true story = { workspace = true, optional = true } strum = { workspace = true, features = ["derive"] } theme.workspace = true +ui_macros.workspace = true [target.'cfg(windows)'.dependencies] windows.workspace = true diff --git a/crates/ui/src/path_str.rs b/crates/ui/src/path_str.rs new file mode 100644 index 00000000000000..2ebb3fedb36266 --- /dev/null +++ b/crates/ui/src/path_str.rs @@ -0,0 +1,33 @@ +#[cfg(test)] +mod tests { + use strum::EnumString; + use ui_macros::{path_str, DerivePathStr}; + + #[test] + fn test_derive_path_str_with_prefix() { + #[derive(Debug, EnumString, DerivePathStr)] + #[strum(serialize_all = "snake_case")] + #[path_str(prefix = "test_prefix")] + enum MyEnum { + FooBar, + Baz, + } + + assert_eq!(MyEnum::FooBar.path(), "test_prefix/foo_bar"); + assert_eq!(MyEnum::Baz.path(), "test_prefix/baz"); + } + + #[test] + fn test_derive_path_str_with_prefix_and_suffix() { + #[derive(Debug, EnumString, DerivePathStr)] + #[strum(serialize_all = "snake_case")] + #[path_str(prefix = "test_prefix", suffix = ".txt")] + enum MyEnum { + FooBar, + Baz, + } + + assert_eq!(MyEnum::FooBar.path(), "test_prefix/foo_bar.txt"); + assert_eq!(MyEnum::Baz.path(), "test_prefix/baz.txt"); + } +} diff --git a/crates/ui/src/ui.rs b/crates/ui/src/ui.rs index a0146c69fac7c0..4f5d6314bea6a6 100644 --- a/crates/ui/src/ui.rs +++ b/crates/ui/src/ui.rs @@ -8,6 +8,7 @@ mod 
components; mod disableable; mod fixed; mod key_bindings; +mod path_str; pub mod prelude; mod selectable; mod styled_ext; diff --git a/crates/ui_macros/Cargo.toml b/crates/ui_macros/Cargo.toml new file mode 100644 index 00000000000000..72009f1162cc8d --- /dev/null +++ b/crates/ui_macros/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "ui_macros" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/ui_macros.rs" +proc-macro = true + +[dependencies] +proc-macro2 = "1.0.66" +quote = "1.0.9" +syn = { version = "1.0.72", features = ["full", "extra-traits"] } +convert_case.workspace = true diff --git a/crates/ui_macros/LICENSE-GPL b/crates/ui_macros/LICENSE-GPL new file mode 120000 index 00000000000000..89e542f750cd38 --- /dev/null +++ b/crates/ui_macros/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/ui_macros/src/derive_path_str.rs b/crates/ui_macros/src/derive_path_str.rs new file mode 100644 index 00000000000000..3988bc0c5c2e08 --- /dev/null +++ b/crates/ui_macros/src/derive_path_str.rs @@ -0,0 +1,105 @@ +use convert_case::{Case, Casing}; +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, Attribute, Data, DeriveInput, Lit, Meta, NestedMeta}; + +pub fn derive_path_str(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + let name = &input.ident; + + let prefix = get_attr_value(&input.attrs, "prefix").expect("prefix attribute is required"); + let suffix = get_attr_value(&input.attrs, "suffix").unwrap_or_else(|| "".to_string()); + + let serialize_all = get_strum_serialize_all(&input.attrs); + let path_str_impl = impl_path_str(name, &input.data, &prefix, &suffix, serialize_all); + + let expanded = quote! 
{ + impl #name { + pub fn path(&self) -> &'static str { + #path_str_impl + } + } + }; + + TokenStream::from(expanded) +} + +fn impl_path_str( + name: &syn::Ident, + data: &Data, + prefix: &str, + suffix: &str, + serialize_all: Option, +) -> proc_macro2::TokenStream { + match *data { + Data::Enum(ref data) => { + let match_arms = data.variants.iter().map(|variant| { + let ident = &variant.ident; + let variant_name = if let Some(ref case) = serialize_all { + match case.as_str() { + "snake_case" => ident.to_string().to_case(Case::Snake), + "lowercase" => ident.to_string().to_lowercase(), + _ => ident.to_string(), + } + } else { + ident.to_string() + }; + let path = format!("{}/{}{}", prefix, variant_name, suffix); + quote! { + #name::#ident => #path, + } + }); + + quote! { + match self { + #(#match_arms)* + } + } + } + _ => panic!("DerivePathStr only supports enums"), + } +} + +fn get_strum_serialize_all(attrs: &[Attribute]) -> Option { + attrs + .iter() + .filter(|attr| attr.path.is_ident("strum")) + .find_map(|attr| { + if let Ok(Meta::List(meta_list)) = attr.parse_meta() { + meta_list.nested.iter().find_map(|nested_meta| { + if let NestedMeta::Meta(Meta::NameValue(name_value)) = nested_meta { + if name_value.path.is_ident("serialize_all") { + if let Lit::Str(lit_str) = &name_value.lit { + return Some(lit_str.value()); + } + } + } + None + }) + } else { + None + } + }) +} + +fn get_attr_value(attrs: &[Attribute], key: &str) -> Option { + attrs + .iter() + .filter(|attr| attr.path.is_ident("path_str")) + .find_map(|attr| { + if let Ok(Meta::List(meta_list)) = attr.parse_meta() { + meta_list.nested.iter().find_map(|nested_meta| { + if let NestedMeta::Meta(Meta::NameValue(name_value)) = nested_meta { + if name_value.path.is_ident(key) { + if let Lit::Str(lit_str) = &name_value.lit { + return Some(lit_str.value()); + } + } + } + None + }) + } else { + None + } + }) +} diff --git a/crates/ui_macros/src/ui_macros.rs b/crates/ui_macros/src/ui_macros.rs new file mode 100644 
index 00000000000000..a625caefd5111b --- /dev/null +++ b/crates/ui_macros/src/ui_macros.rs @@ -0,0 +1,53 @@ +mod derive_path_str; + +use proc_macro::TokenStream; + +/// Derives the `path` method for an enum. +/// +/// This macro generates a `path` method for each variant of the enum, which returns a string +/// representation of the enum variant's path. The path is constructed using a prefix and +/// optionally a suffix, which are specified using attributes. +/// +/// # Attributes +/// +/// - `#[path_str(prefix = "...")]`: Required. Specifies the prefix for all paths. +/// - `#[path_str(suffix = "...")]`: Optional. Specifies a suffix for all paths. +/// - `#[strum(serialize_all = "...")]`: Optional. Specifies the case conversion for variant names. +/// +/// # Example +/// +/// ``` +/// use strum::EnumString; +/// use ui_macros::{path_str, DerivePathStr}; +/// +/// #[derive(EnumString, DerivePathStr)] +/// #[path_str(prefix = "my_prefix", suffix = ".txt")] +/// #[strum(serialize_all = "snake_case")] +/// enum MyEnum { +/// VariantOne, +/// VariantTwo, +/// } +/// +/// // These assertions would work if we could instantiate the enum +/// // assert_eq!(MyEnum::VariantOne.path(), "my_prefix/variant_one.txt"); +/// // assert_eq!(MyEnum::VariantTwo.path(), "my_prefix/variant_two.txt"); +/// ``` +/// +/// # Panics +/// +/// This macro will panic if used on anything other than an enum. +#[proc_macro_derive(DerivePathStr, attributes(path_str))] +pub fn derive_path_str(input: TokenStream) -> TokenStream { + derive_path_str::derive_path_str(input) +} + +/// A marker attribute for use with `DerivePathStr`. +/// +/// This attribute is used to specify the prefix and suffix for the `path` method +/// generated by `DerivePathStr`. It doesn't modify the input and is only used as a +/// marker for the derive macro. 
+#[proc_macro_attribute] +pub fn path_str(_args: TokenStream, input: TokenStream) -> TokenStream { + // This attribute doesn't modify the input, it's just a marker + input +} From ce848375fe6511c2e167a9d002af8af4772bf08c Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Fri, 13 Sep 2024 17:44:16 -0400 Subject: [PATCH 075/762] add `ui::Vector` and separate images from icons (#17815) This PR pulls non-icon assets out of `ui::components::icon` in preparation for icon standardization. In the future icons will have standard names and sizes, and these image assets won't conform to those constraints. We can also add a `ui::components::image::Image` wrapper around the `gpui::img` element in the future for any Zed-specific image styling we want to enforce. Of note: ```rust #[derive(Debug, PartialEq, Eq, Copy, Clone, EnumIter, EnumString, IntoStaticStr, Serialize, Deserialize, DerivePathStr)] #[strum(serialize_all = "snake_case")] #[path_str(prefix = "images", suffix = ".svg")] pub enum VectorName { ZedLogo, ZedXCopilot, } ``` You can see in the above code we no longer need to manually specify paths for image/icon enums like we currently do in `ui::components::icon`. 
The icon component will get this same treatment in the future, once we: - do the design work needed to standardize the icons - remove unused icons - update icon names Release Notes: - N/A --- assets/images/zed_logo.svg | 10 ++ assets/images/zed_x_copilot.svg | 14 +++ crates/assets/src/assets.rs | 1 + crates/copilot/src/sign_in.rs | 12 +- .../gpui_macros/src/derive_path_static_str.rs | 73 +++++++++++ crates/gpui_macros/src/gpui_macros.rs | 7 ++ crates/storybook/src/assets.rs | 1 + crates/storybook/src/story_selector.rs | 2 + crates/ui/src/components.rs | 4 + crates/ui/src/components/icon.rs | 2 - crates/ui/src/components/image.rs | 115 ++++++++++++++++++ 11 files changed, 231 insertions(+), 10 deletions(-) create mode 100644 assets/images/zed_logo.svg create mode 100644 assets/images/zed_x_copilot.svg create mode 100644 crates/gpui_macros/src/derive_path_static_str.rs create mode 100644 crates/ui/src/components/image.rs diff --git a/assets/images/zed_logo.svg b/assets/images/zed_logo.svg new file mode 100644 index 00000000000000..d1769449c19840 --- /dev/null +++ b/assets/images/zed_logo.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/assets/images/zed_x_copilot.svg b/assets/images/zed_x_copilot.svg new file mode 100644 index 00000000000000..3c5be71074c195 --- /dev/null +++ b/assets/images/zed_x_copilot.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/crates/assets/src/assets.rs b/crates/assets/src/assets.rs index 395cbf62f6ce5e..ee990085f6de17 100644 --- a/crates/assets/src/assets.rs +++ b/crates/assets/src/assets.rs @@ -8,6 +8,7 @@ use rust_embed::RustEmbed; #[folder = "../../assets"] #[include = "fonts/**/*"] #[include = "icons/**/*"] +#[include = "images/**/*"] #[include = "themes/**/*"] #[exclude = "themes/src/*"] #[include = "sounds/**/*"] diff --git a/crates/copilot/src/sign_in.rs b/crates/copilot/src/sign_in.rs index 1d14e5c1aadc55..da6b969b7222bb 100644 --- a/crates/copilot/src/sign_in.rs +++ b/crates/copilot/src/sign_in.rs @@ -1,10 +1,10 
@@ use crate::{request::PromptUserDeviceFlow, Copilot, Status}; use gpui::{ - div, svg, AppContext, ClipboardItem, DismissEvent, Element, EventEmitter, FocusHandle, + div, AppContext, ClipboardItem, DismissEvent, Element, EventEmitter, FocusHandle, FocusableView, InteractiveElement, IntoElement, Model, MouseDownEvent, ParentElement, Render, Styled, Subscription, ViewContext, }; -use ui::{prelude::*, Button, IconName, Label}; +use ui::{prelude::*, Button, Label, Vector, VectorName}; use workspace::ModalView; const COPILOT_SIGN_UP_URL: &str = "https://github.com/features/copilot"; @@ -198,12 +198,8 @@ impl Render for CopilotCodeVerification { cx.focus(&this.focus_handle); })) .child( - svg() - .w_32() - .h_16() - .flex_none() - .path(IconName::ZedXCopilot.path()) - .text_color(cx.theme().colors().icon), + Vector::new(VectorName::ZedXCopilot, rems(8.), rems(4.)) + .color(Color::Custom(cx.theme().colors().icon)), ) .child(prompt) } diff --git a/crates/gpui_macros/src/derive_path_static_str.rs b/crates/gpui_macros/src/derive_path_static_str.rs new file mode 100644 index 00000000000000..25531fd2adf9d5 --- /dev/null +++ b/crates/gpui_macros/src/derive_path_static_str.rs @@ -0,0 +1,73 @@ +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, Attribute, Data, DeriveInput, Lit, Meta, NestedMeta}; + +pub fn derive_path_static_str(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + let name = &input.ident; + + let prefix = get_attr_value(&input.attrs, "prefix").unwrap_or_else(|| "".to_string()); + let suffix = get_attr_value(&input.attrs, "suffix").unwrap_or_else(|| "".to_string()); + let delimiter = get_attr_value(&input.attrs, "delimiter").unwrap_or_else(|| "/".to_string()); + + let path_str_impl = impl_path_str(name, &input.data, &prefix, &suffix, &delimiter); + + let expanded = quote! 
{ + impl #name { + pub fn path_str(&self) -> &'static str { + #path_str_impl + } + } + }; + + TokenStream::from(expanded) +} + +fn impl_path_str( + name: &syn::Ident, + data: &Data, + prefix: &str, + suffix: &str, + delimiter: &str, +) -> proc_macro2::TokenStream { + match *data { + Data::Enum(ref data) => { + let match_arms = data.variants.iter().map(|variant| { + let ident = &variant.ident; + let path = format!("{}{}{}{}{}", prefix, delimiter, ident, delimiter, suffix); + quote! { + #name::#ident => #path, + } + }); + + quote! { + match self { + #(#match_arms)* + } + } + } + _ => panic!("DerivePathStr only supports enums"), + } +} + +fn get_attr_value(attrs: &[Attribute], key: &str) -> Option { + attrs + .iter() + .filter(|attr| attr.path.is_ident("derive_path_static_str")) + .find_map(|attr| { + if let Ok(Meta::List(meta_list)) = attr.parse_meta() { + meta_list.nested.iter().find_map(|nested_meta| { + if let NestedMeta::Meta(Meta::NameValue(name_value)) = nested_meta { + if name_value.path.is_ident(key) { + if let Lit::Str(lit_str) = &name_value.lit { + return Some(lit_str.value()); + } + } + } + None + }) + } else { + None + } + }) +} diff --git a/crates/gpui_macros/src/gpui_macros.rs b/crates/gpui_macros/src/gpui_macros.rs index c4cf5358b3e756..09cf4027d2f37c 100644 --- a/crates/gpui_macros/src/gpui_macros.rs +++ b/crates/gpui_macros/src/gpui_macros.rs @@ -1,4 +1,5 @@ mod derive_into_element; +mod derive_path_static_str; mod derive_render; mod register_action; mod styles; @@ -27,6 +28,12 @@ pub fn derive_render(input: TokenStream) -> TokenStream { derive_render::derive_render(input) } +#[proc_macro_derive(PathStaticStr)] +#[doc(hidden)] +pub fn derive_path_static_str(input: TokenStream) -> TokenStream { + derive_path_static_str::derive_path_static_str(input) +} + /// Used by GPUI to generate the style helpers. 
#[proc_macro] #[doc(hidden)] diff --git a/crates/storybook/src/assets.rs b/crates/storybook/src/assets.rs index da874e5f2de143..f45d1457df91fc 100644 --- a/crates/storybook/src/assets.rs +++ b/crates/storybook/src/assets.rs @@ -8,6 +8,7 @@ use rust_embed::RustEmbed; #[folder = "../../assets"] #[include = "fonts/**/*"] #[include = "icons/**/*"] +#[include = "images/**/*"] #[include = "themes/**/*"] #[include = "sounds/**/*"] #[include = "*.md"] diff --git a/crates/storybook/src/story_selector.rs b/crates/storybook/src/story_selector.rs index 5df02b1df2f488..881fd83f8f21b9 100644 --- a/crates/storybook/src/story_selector.rs +++ b/crates/storybook/src/story_selector.rs @@ -40,6 +40,7 @@ pub enum ComponentStory { ToolStrip, ViewportUnits, WithRemSize, + Vector, } impl ComponentStory { @@ -75,6 +76,7 @@ impl ComponentStory { Self::ToolStrip => cx.new_view(|_| ui::ToolStripStory).into(), Self::ViewportUnits => cx.new_view(|_| crate::stories::ViewportUnitsStory).into(), Self::WithRemSize => cx.new_view(|_| crate::stories::WithRemSizeStory).into(), + Self::Vector => cx.new_view(|_| ui::VectorStory).into(), } } } diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index 3a56e46eae49c7..fe63b035027afb 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -7,6 +7,7 @@ mod divider; mod dropdown_menu; mod facepile; mod icon; +mod image; mod indicator; mod keybinding; mod label; @@ -37,6 +38,7 @@ pub use divider::*; pub use dropdown_menu::*; pub use facepile::*; pub use icon::*; +pub use image::*; pub use indicator::*; pub use keybinding::*; pub use label::*; @@ -55,5 +57,7 @@ pub use tab_bar::*; pub use tool_strip::*; pub use tooltip::*; +#[cfg(feature = "stories")] +pub use image::story::*; #[cfg(feature = "stories")] pub use stories::*; diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index 0001ab4a2b275b..fd4f17ac0e0ef7 100644 --- a/crates/ui/src/components/icon.rs +++ 
b/crates/ui/src/components/icon.rs @@ -271,7 +271,6 @@ pub enum IconName { XCircle, ZedAssistant, ZedAssistantFilled, - ZedXCopilot, Visible, } @@ -443,7 +442,6 @@ impl IconName { IconName::XCircle => "icons/error.svg", IconName::ZedAssistant => "icons/zed_assistant.svg", IconName::ZedAssistantFilled => "icons/zed_assistant_filled.svg", - IconName::ZedXCopilot => "icons/zed_x_copilot.svg", IconName::Visible => "icons/visible.svg", } } diff --git a/crates/ui/src/components/image.rs b/crates/ui/src/components/image.rs new file mode 100644 index 00000000000000..286fe7f56f3e31 --- /dev/null +++ b/crates/ui/src/components/image.rs @@ -0,0 +1,115 @@ +use gpui::{svg, IntoElement, Rems, RenderOnce, Size, Styled, WindowContext}; +use serde::{Deserialize, Serialize}; +use strum::{EnumIter, EnumString, IntoStaticStr}; +use ui_macros::{path_str, DerivePathStr}; + +use crate::Color; + +#[derive( + Debug, + PartialEq, + Eq, + Copy, + Clone, + EnumIter, + EnumString, + IntoStaticStr, + Serialize, + Deserialize, + DerivePathStr, +)] +#[strum(serialize_all = "snake_case")] +#[path_str(prefix = "images", suffix = ".svg")] +pub enum VectorName { + ZedLogo, + ZedXCopilot, +} + +/// A vector image, such as an SVG. +/// +/// A [Vector] is different from an [Icon] in that it is intended +/// to be displayed at a specific size, or series of sizes, rather +/// than conforming to the standard size of an icons. +#[derive(IntoElement)] +pub struct Vector { + path: &'static str, + color: Color, + size: Size, +} + +impl Vector { + /// Create a new [Vector] image with the given [VectorName] and size. + pub fn new(vector: VectorName, width: Rems, height: Rems) -> Self { + Self { + path: vector.path(), + color: Color::default(), + size: Size { width, height }, + } + } + + /// Create a new [Vector] image where the width and height are the same. 
+ pub fn square(vector: VectorName, size: Rems) -> Self { + Self::new(vector, size, size) + } + + /// Set the image color + pub fn color(mut self, color: Color) -> Self { + self.color = color; + self + } + + /// Set the image size + pub fn size(mut self, size: impl Into>) -> Self { + let size = size.into(); + + self.size = size; + self + } +} + +impl RenderOnce for Vector { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + let width = self.size.width; + let height = self.size.height; + + svg() + // By default, prevent the SVG from stretching + // to fill its container. + .flex_none() + .w(width) + .h(height) + .path(self.path) + .text_color(self.color.color(cx)) + } +} + +#[cfg(feature = "stories")] +pub mod story { + use gpui::Render; + use story::{Story, StoryItem, StorySection}; + use strum::IntoEnumIterator; + + use crate::prelude::*; + + use super::{Vector, VectorName}; + + pub struct VectorStory; + + impl Render for VectorStory { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + Story::container().child(StorySection::new().children(VectorName::iter().map( + |vector| StoryItem::new(format!("{:?}", vector), Vector::square(vector, rems(8.))), + ))) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn vector_path() { + assert_eq!(VectorName::ZedLogo.path(), "images/zed_logo.svg"); + } +} From e8a2dd92c82f56d01067aa9966eaaf095a7e657c Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Fri, 13 Sep 2024 21:12:29 -0400 Subject: [PATCH 076/762] Derive icon paths (#17816) This PR improves adding and working with icons by using the new `DerivePathStr` to derive icon paths. This means paths no longer need to be manually specified, and the `IconName` and file name will always be consistent between icons. This PR does not do any work to standardize icons visually, remove unused icons, or any other such cleanup. 
Release Notes: - N/A --- assets/icons/audio_off.svg | 1 + assets/icons/audio_on.svg | 1 + ...ase_insensitive.svg => case_sensitive.svg} | 0 assets/icons/{x.svg => close.svg} | 0 .../{text_select.svg => cursor_i_beam.svg} | 0 .../{text-cursor.svg => cursor_text.svg} | 0 assets/icons/{feedback.svg => envelope.svg} | 0 assets/icons/file_doc.svg | 6 + assets/icons/file_generic.svg | 5 + assets/icons/file_git.svg | 6 + assets/icons/file_lock.svg | 4 + assets/icons/file_rust.svg | 4 + assets/icons/file_toml.svg | 5 + assets/icons/{project.svg => file_tree.svg} | 0 assets/icons/folder.svg | 3 + assets/icons/folder_open.svg | 4 + .../icons/{stop_sharing.svg => folder_x.svg} | 0 ...{conversations.svg => message_bubbles.svg} | 0 assets/icons/{desktop.svg => screen.svg} | 0 assets/icons/settings.svg | 4 + .../{sliders-alt.svg => settings_alt.svg} | 0 assets/icons/speaker_off.svg | 8 - .../{user_group_16.svg => user_group.svg} | 0 .../icons/{word_search.svg => whole_word.svg} | 0 assets/icons/{error.svg => x_circle.svg} | 0 .../src/activity_indicator.rs | 6 +- crates/assistant/src/assistant_panel.rs | 6 +- crates/assistant/src/inline_assistant.rs | 2 +- .../src/slash_command/diagnostics_command.rs | 4 +- .../src/terminal_inline_assistant.rs | 2 +- crates/collab_ui/src/collab_panel.rs | 2 +- crates/diagnostics/src/diagnostics.rs | 4 +- crates/diagnostics/src/items.rs | 4 +- crates/diagnostics/src/toolbar_controls.rs | 2 +- .../quick_action_bar/src/quick_action_bar.rs | 2 +- crates/terminal_view/src/terminal_view.rs | 2 +- crates/ui/src/components/icon.rs | 219 +++--------------- .../ui/src/components/stories/list_header.rs | 2 +- crates/workspace/src/notifications.rs | 14 +- 39 files changed, 96 insertions(+), 226 deletions(-) create mode 100644 assets/icons/audio_off.svg create mode 100644 assets/icons/audio_on.svg rename assets/icons/{case_insensitive.svg => case_sensitive.svg} (100%) rename assets/icons/{x.svg => close.svg} (100%) rename assets/icons/{text_select.svg => 
cursor_i_beam.svg} (100%) rename assets/icons/{text-cursor.svg => cursor_text.svg} (100%) rename assets/icons/{feedback.svg => envelope.svg} (100%) create mode 100644 assets/icons/file_doc.svg create mode 100644 assets/icons/file_generic.svg create mode 100644 assets/icons/file_git.svg create mode 100644 assets/icons/file_lock.svg create mode 100644 assets/icons/file_rust.svg create mode 100644 assets/icons/file_toml.svg rename assets/icons/{project.svg => file_tree.svg} (100%) create mode 100644 assets/icons/folder.svg create mode 100644 assets/icons/folder_open.svg rename assets/icons/{stop_sharing.svg => folder_x.svg} (100%) rename assets/icons/{conversations.svg => message_bubbles.svg} (100%) rename assets/icons/{desktop.svg => screen.svg} (100%) create mode 100644 assets/icons/settings.svg rename assets/icons/{sliders-alt.svg => settings_alt.svg} (100%) delete mode 100644 assets/icons/speaker_off.svg rename assets/icons/{user_group_16.svg => user_group.svg} (100%) rename assets/icons/{word_search.svg => whole_word.svg} (100%) rename assets/icons/{error.svg => x_circle.svg} (100%) diff --git a/assets/icons/audio_off.svg b/assets/icons/audio_off.svg new file mode 100644 index 00000000000000..93b98471ca1a15 --- /dev/null +++ b/assets/icons/audio_off.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/audio_on.svg b/assets/icons/audio_on.svg new file mode 100644 index 00000000000000..42310ea32c289e --- /dev/null +++ b/assets/icons/audio_on.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/case_insensitive.svg b/assets/icons/case_sensitive.svg similarity index 100% rename from assets/icons/case_insensitive.svg rename to assets/icons/case_sensitive.svg diff --git a/assets/icons/x.svg b/assets/icons/close.svg similarity index 100% rename from assets/icons/x.svg rename to assets/icons/close.svg diff --git a/assets/icons/text_select.svg b/assets/icons/cursor_i_beam.svg similarity index 100% rename from assets/icons/text_select.svg rename to assets/icons/cursor_i_beam.svg diff 
--git a/assets/icons/text-cursor.svg b/assets/icons/cursor_text.svg similarity index 100% rename from assets/icons/text-cursor.svg rename to assets/icons/cursor_text.svg diff --git a/assets/icons/feedback.svg b/assets/icons/envelope.svg similarity index 100% rename from assets/icons/feedback.svg rename to assets/icons/envelope.svg diff --git a/assets/icons/file_doc.svg b/assets/icons/file_doc.svg new file mode 100644 index 00000000000000..3b11995f36759e --- /dev/null +++ b/assets/icons/file_doc.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_generic.svg b/assets/icons/file_generic.svg new file mode 100644 index 00000000000000..3c72bd3320d9e8 --- /dev/null +++ b/assets/icons/file_generic.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/file_git.svg b/assets/icons/file_git.svg new file mode 100644 index 00000000000000..197db2e9e60f26 --- /dev/null +++ b/assets/icons/file_git.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/file_lock.svg b/assets/icons/file_lock.svg new file mode 100644 index 00000000000000..6bfef249b4516f --- /dev/null +++ b/assets/icons/file_lock.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_rust.svg b/assets/icons/file_rust.svg new file mode 100644 index 00000000000000..5db753628af10c --- /dev/null +++ b/assets/icons/file_rust.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/file_toml.svg b/assets/icons/file_toml.svg new file mode 100644 index 00000000000000..9ab78af50f9302 --- /dev/null +++ b/assets/icons/file_toml.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/project.svg b/assets/icons/file_tree.svg similarity index 100% rename from assets/icons/project.svg rename to assets/icons/file_tree.svg diff --git a/assets/icons/folder.svg b/assets/icons/folder.svg new file mode 100644 index 00000000000000..a76dc63d1a6639 --- /dev/null +++ b/assets/icons/folder.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/folder_open.svg b/assets/icons/folder_open.svg new file mode 100644 index 
00000000000000..ef37f55f83a38f --- /dev/null +++ b/assets/icons/folder_open.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/stop_sharing.svg b/assets/icons/folder_x.svg similarity index 100% rename from assets/icons/stop_sharing.svg rename to assets/icons/folder_x.svg diff --git a/assets/icons/conversations.svg b/assets/icons/message_bubbles.svg similarity index 100% rename from assets/icons/conversations.svg rename to assets/icons/message_bubbles.svg diff --git a/assets/icons/desktop.svg b/assets/icons/screen.svg similarity index 100% rename from assets/icons/desktop.svg rename to assets/icons/screen.svg diff --git a/assets/icons/settings.svg b/assets/icons/settings.svg new file mode 100644 index 00000000000000..081d25bf482472 --- /dev/null +++ b/assets/icons/settings.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/sliders-alt.svg b/assets/icons/settings_alt.svg similarity index 100% rename from assets/icons/sliders-alt.svg rename to assets/icons/settings_alt.svg diff --git a/assets/icons/speaker_off.svg b/assets/icons/speaker_off.svg deleted file mode 100644 index f60c35de7f3f5b..00000000000000 --- a/assets/icons/speaker_off.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - diff --git a/assets/icons/user_group_16.svg b/assets/icons/user_group.svg similarity index 100% rename from assets/icons/user_group_16.svg rename to assets/icons/user_group.svg diff --git a/assets/icons/word_search.svg b/assets/icons/whole_word.svg similarity index 100% rename from assets/icons/word_search.svg rename to assets/icons/whole_word.svg diff --git a/assets/icons/error.svg b/assets/icons/x_circle.svg similarity index 100% rename from assets/icons/error.svg rename to assets/icons/x_circle.svg diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 4b6508edb074fd..3f567c9e802b80 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -262,7 +262,7 
@@ impl ActivityIndicator { if !failed.is_empty() { return Some(Content { icon: Some( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), @@ -280,7 +280,7 @@ impl ActivityIndicator { if let Some(failure) = self.project.read(cx).last_formatting_failure() { return Some(Content { icon: Some( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), @@ -333,7 +333,7 @@ impl ActivityIndicator { }), AutoUpdateStatus::Errored => Some(Content { icon: Some( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .into_any_element(), ), diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 51c9aa9b4ea901..af3abec67c8100 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -4110,7 +4110,7 @@ impl ContextEditor { h_flex() .gap_3() .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) @@ -5235,7 +5235,7 @@ fn quote_selection_fold_placeholder(title: String, editor: WeakView) -> ButtonLike::new(fold_id) .style(ButtonStyle::Filled) .layer(ElevationIndex::ElevatedSurface) - .child(Icon::new(IconName::TextSelect)) + .child(Icon::new(IconName::CursorIBeam)) .child(Label::new(title.clone()).single_line()) .on_click(move |_, cx| { editor @@ -5339,7 +5339,7 @@ fn render_docs_slash_command_trailer( div() .id(("latest-error", row.0)) .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index b01a712a7e4e40..8b71e54746dad6 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1478,7 +1478,7 @@ impl Render for PromptEditor { 
.child( ModelSelector::new( self.fs.clone(), - IconButton::new("context", IconName::SlidersAlt) + IconButton::new("context", IconName::SettingsAlt) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .icon_color(Color::Muted) diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 6c821bd7b4a5c9..21058306519787 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -193,11 +193,11 @@ impl SlashCommand for DiagnosticsSlashCommand { .map(|(range, placeholder_type)| SlashCommandOutputSection { range, icon: match placeholder_type { - PlaceholderType::Root(_, _) => IconName::ExclamationTriangle, + PlaceholderType::Root(_, _) => IconName::Warning, PlaceholderType::File(_) => IconName::File, PlaceholderType::Diagnostic(DiagnosticType::Error, _) => IconName::XCircle, PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => { - IconName::ExclamationTriangle + IconName::Warning } }, label: match placeholder_type { diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 61a8813f6c55ed..d5c085b646cd86 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -585,7 +585,7 @@ impl Render for PromptEditor { .gap_2() .child(ModelSelector::new( self.fs.clone(), - IconButton::new("context", IconName::SlidersAlt) + IconButton::new("context", IconName::SettingsAlt) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .icon_color(Color::Muted) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 72701101816995..59f83e06548a6b 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2831,7 +2831,7 @@ impl Panel for CollabPanel { fn icon(&self, cx: &gpui::WindowContext) -> Option { 
CollaborationPanelSettings::get_global(cx) .button - .then_some(ui::IconName::Collab) + .then_some(ui::IconName::UserGroup) } fn icon_tooltip(&self, _cx: &WindowContext) -> Option<&'static str> { diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index ced97be2dc87cd..ddf39e0bfa2424 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -667,7 +667,7 @@ impl Item for ProjectDiagnosticsEditor { then.child( h_flex() .gap_1() - .child(Icon::new(IconName::ExclamationTriangle).color(Color::Warning)) + .child(Icon::new(IconName::Warning).color(Color::Warning)) .child( Label::new(self.summary.warning_count.to_string()) .color(params.text_color()), @@ -804,7 +804,7 @@ fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock { icon.path(IconName::XCircle.path()) .text_color(Color::Error.color(cx)) } else { - icon.path(IconName::ExclamationTriangle.path()) + icon.path(IconName::Warning.path()) .text_color(Color::Warning.color(cx)) } }), diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 80b31b999c653b..72a4ac9bcfb01e 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -30,7 +30,7 @@ impl Render for DiagnosticIndicator { (0, warning_count) => h_flex() .gap_1() .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) @@ -52,7 +52,7 @@ impl Render for DiagnosticIndicator { ) .child(Label::new(error_count.to_string()).size(LabelSize::Small)) .child( - Icon::new(IconName::ExclamationTriangle) + Icon::new(IconName::Warning) .size(IconSize::Small) .color(Color::Warning), ) diff --git a/crates/diagnostics/src/toolbar_controls.rs b/crates/diagnostics/src/toolbar_controls.rs index 64eb2683047171..b546db50a064ba 100644 --- a/crates/diagnostics/src/toolbar_controls.rs +++ b/crates/diagnostics/src/toolbar_controls.rs @@ -50,7 +50,7 @@ impl Render for 
ToolbarControls { ) }) .child( - IconButton::new("toggle-warnings", IconName::ExclamationTriangle) + IconButton::new("toggle-warnings", IconName::Warning) .tooltip(move |cx| Tooltip::text(tooltip, cx)) .on_click(cx.listener(|this, _, cx| { if let Some(editor) = this.editor() { diff --git a/crates/quick_action_bar/src/quick_action_bar.rs b/crates/quick_action_bar/src/quick_action_bar.rs index 0d530d6821bc83..57418b54b7e05a 100644 --- a/crates/quick_action_bar/src/quick_action_bar.rs +++ b/crates/quick_action_bar/src/quick_action_bar.rs @@ -150,7 +150,7 @@ impl Render for QuickActionBar { let focus = editor.focus_handle(cx); PopoverMenu::new("editor-selections-dropdown") .trigger( - IconButton::new("toggle_editor_selections_icon", IconName::TextCursor) + IconButton::new("toggle_editor_selections_icon", IconName::CursorIBeam) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .style(ButtonStyle::Subtle) diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 903e9eebd272bb..1869e33383d177 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -991,7 +991,7 @@ impl Item for TerminalView { Some(terminal_task) => match &terminal_task.status { TaskStatus::Running => (IconName::Play, Color::Disabled, None), TaskStatus::Unknown => ( - IconName::ExclamationTriangle, + IconName::Warning, Color::Warning, Some(rerun_button(terminal_task.id.clone())), ), diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index fd4f17ac0e0ef7..c2743ecbd9bd72 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -1,6 +1,7 @@ use gpui::{svg, AnimationElement, Hsla, IntoElement, Rems, Transformation}; use serde::{Deserialize, Serialize}; use strum::{EnumIter, EnumString, IntoStaticStr}; +use ui_macros::DerivePathStr; use crate::{prelude::*, Indicator}; @@ -102,15 +103,27 @@ impl IconSize { } #[derive( - Debug, PartialEq, Eq, 
Copy, Clone, EnumIter, EnumString, IntoStaticStr, Serialize, Deserialize, + Debug, + PartialEq, + Eq, + Copy, + Clone, + EnumIter, + EnumString, + IntoStaticStr, + Serialize, + Deserialize, + DerivePathStr, )] +#[strum(serialize_all = "snake_case")] +#[path_str(prefix = "icons", suffix = ".svg")] pub enum IconName { Ai, AiAnthropic, AiAnthropicHosted, - AiOpenAi, AiGoogle, AiOllama, + AiOpenAi, AiZed, ArrowCircle, ArrowDown, @@ -135,15 +148,13 @@ pub enum IconName { CaseSensitive, Check, ChevronDown, - /// This chevron indicates a popover menu. - ChevronDownSmall, + ChevronDownSmall, // This chevron indicates a popover menu. ChevronLeft, ChevronRight, ChevronUp, ChevronUpDown, Close, Code, - Collab, Command, Context, Control, @@ -153,6 +164,8 @@ pub enum IconName { CopilotInit, Copy, CountdownTimer, + CursorIBeam, + CursorText, Dash, DatabaseZap, Delete, @@ -162,21 +175,20 @@ pub enum IconName { EllipsisVertical, Envelope, Escape, - ExclamationTriangle, Exit, ExpandVertical, ExternalLink, Eye, File, + FileCode, FileDoc, FileGeneric, FileGit, FileLock, FileRust, + FileText, FileToml, FileTree, - FileText, - FileCode, Filter, Folder, FolderOpen, @@ -184,11 +196,11 @@ pub enum IconName { Font, FontSize, FontWeight, - Github, - GenericMinimize, - GenericMaximize, GenericClose, + GenericMaximize, + GenericMinimize, GenericRestore, + Github, Hash, HistoryRerun, Indicator, @@ -228,21 +240,21 @@ pub enum IconName { Rerun, Return, Reveal, - Route, RotateCcw, RotateCw, + Route, Save, Screen, - SearchSelection, SearchCode, + SearchSelection, SelectAll, Server, Settings, + SettingsAlt, Shift, Slash, SlashSquare, Sliders, - SlidersAlt, Snip, Space, Sparkle, @@ -260,191 +272,18 @@ pub enum IconName { SupermavenInit, Tab, Terminal, - TextCursor, - TextSelect, Trash, TriangleRight, Undo, Unpin, Update, + UserGroup, + Visible, + Warning, WholeWord, XCircle, ZedAssistant, ZedAssistantFilled, - Visible, -} - -impl IconName { - pub fn path(self) -> &'static str { - match self { - 
IconName::Ai => "icons/ai.svg", - IconName::AiAnthropic => "icons/ai_anthropic.svg", - IconName::AiAnthropicHosted => "icons/ai_anthropic_hosted.svg", - IconName::AiOpenAi => "icons/ai_open_ai.svg", - IconName::AiGoogle => "icons/ai_google.svg", - IconName::AiOllama => "icons/ai_ollama.svg", - IconName::AiZed => "icons/ai_zed.svg", - IconName::ArrowCircle => "icons/arrow_circle.svg", - IconName::ArrowDown => "icons/arrow_down.svg", - IconName::ArrowDownFromLine => "icons/arrow_down_from_line.svg", - IconName::ArrowLeft => "icons/arrow_left.svg", - IconName::ArrowRight => "icons/arrow_right.svg", - IconName::ArrowUp => "icons/arrow_up.svg", - IconName::ArrowUpFromLine => "icons/arrow_up_from_line.svg", - IconName::ArrowUpRight => "icons/arrow_up_right.svg", - IconName::AtSign => "icons/at_sign.svg", - IconName::AudioOff => "icons/speaker_off.svg", - IconName::AudioOn => "icons/speaker_loud.svg", - IconName::Backspace => "icons/backspace.svg", - IconName::Bell => "icons/bell.svg", - IconName::BellDot => "icons/bell_dot.svg", - IconName::BellOff => "icons/bell_off.svg", - IconName::BellRing => "icons/bell_ring.svg", - IconName::Bolt => "icons/bolt.svg", - IconName::Book => "icons/book.svg", - IconName::BookCopy => "icons/book_copy.svg", - IconName::BookPlus => "icons/book_plus.svg", - IconName::CaseSensitive => "icons/case_insensitive.svg", - IconName::Check => "icons/check.svg", - IconName::ChevronDown => "icons/chevron_down.svg", - IconName::ChevronDownSmall => "icons/chevron_down_small.svg", - IconName::ChevronLeft => "icons/chevron_left.svg", - IconName::ChevronRight => "icons/chevron_right.svg", - IconName::ChevronUp => "icons/chevron_up.svg", - IconName::ChevronUpDown => "icons/chevron_up_down.svg", - IconName::Close => "icons/x.svg", - IconName::Code => "icons/code.svg", - IconName::Collab => "icons/user_group_16.svg", - IconName::Command => "icons/command.svg", - IconName::Context => "icons/context.svg", - IconName::Control => "icons/control.svg", - 
IconName::Copilot => "icons/copilot.svg", - IconName::CopilotDisabled => "icons/copilot_disabled.svg", - IconName::CopilotError => "icons/copilot_error.svg", - IconName::CopilotInit => "icons/copilot_init.svg", - IconName::Copy => "icons/copy.svg", - IconName::CountdownTimer => "icons/countdown_timer.svg", - IconName::Dash => "icons/dash.svg", - IconName::DatabaseZap => "icons/database_zap.svg", - IconName::Delete => "icons/delete.svg", - IconName::Disconnected => "icons/disconnected.svg", - IconName::Download => "icons/download.svg", - IconName::Ellipsis => "icons/ellipsis.svg", - IconName::EllipsisVertical => "icons/ellipsis_vertical.svg", - IconName::Envelope => "icons/feedback.svg", - IconName::Escape => "icons/escape.svg", - IconName::ExclamationTriangle => "icons/warning.svg", - IconName::Exit => "icons/exit.svg", - IconName::ExpandVertical => "icons/expand_vertical.svg", - IconName::ExternalLink => "icons/external_link.svg", - IconName::Eye => "icons/eye.svg", - IconName::File => "icons/file.svg", - IconName::FileDoc => "icons/file_icons/book.svg", - IconName::FileGeneric => "icons/file_icons/file.svg", - IconName::FileGit => "icons/file_icons/git.svg", - IconName::FileLock => "icons/file_icons/lock.svg", - IconName::FileRust => "icons/file_icons/rust.svg", - IconName::FileToml => "icons/file_icons/toml.svg", - IconName::FileTree => "icons/project.svg", - IconName::FileCode => "icons/file_code.svg", - IconName::FileText => "icons/file_text.svg", - IconName::Filter => "icons/filter.svg", - IconName::Folder => "icons/file_icons/folder.svg", - IconName::FolderOpen => "icons/file_icons/folder_open.svg", - IconName::FolderX => "icons/stop_sharing.svg", - IconName::Font => "icons/font.svg", - IconName::FontSize => "icons/font_size.svg", - IconName::FontWeight => "icons/font_weight.svg", - IconName::Github => "icons/github.svg", - IconName::GenericMinimize => "icons/generic_minimize.svg", - IconName::GenericMaximize => "icons/generic_maximize.svg", - 
IconName::GenericClose => "icons/generic_close.svg", - IconName::GenericRestore => "icons/generic_restore.svg", - IconName::Hash => "icons/hash.svg", - IconName::HistoryRerun => "icons/history_rerun.svg", - IconName::Indicator => "icons/indicator.svg", - IconName::IndicatorX => "icons/indicator_x.svg", - IconName::InlayHint => "icons/inlay_hint.svg", - IconName::Library => "icons/library.svg", - IconName::LineHeight => "icons/line_height.svg", - IconName::Link => "icons/link.svg", - IconName::ListTree => "icons/list_tree.svg", - IconName::MagnifyingGlass => "icons/magnifying_glass.svg", - IconName::MailOpen => "icons/mail_open.svg", - IconName::Maximize => "icons/maximize.svg", - IconName::Menu => "icons/menu.svg", - IconName::MessageBubbles => "icons/conversations.svg", - IconName::Mic => "icons/mic.svg", - IconName::MicMute => "icons/mic_mute.svg", - IconName::Microscope => "icons/microscope.svg", - IconName::Minimize => "icons/minimize.svg", - IconName::Option => "icons/option.svg", - IconName::PageDown => "icons/page_down.svg", - IconName::PageUp => "icons/page_up.svg", - IconName::Pencil => "icons/pencil.svg", - IconName::Person => "icons/person.svg", - IconName::Pin => "icons/pin.svg", - IconName::Play => "icons/play.svg", - IconName::Plus => "icons/plus.svg", - IconName::PocketKnife => "icons/pocket_knife.svg", - IconName::Public => "icons/public.svg", - IconName::PullRequest => "icons/pull_request.svg", - IconName::Quote => "icons/quote.svg", - IconName::Regex => "icons/regex.svg", - IconName::ReplNeutral => "icons/repl_neutral.svg", - IconName::Replace => "icons/replace.svg", - IconName::ReplaceAll => "icons/replace_all.svg", - IconName::ReplaceNext => "icons/replace_next.svg", - IconName::ReplyArrowRight => "icons/reply_arrow_right.svg", - IconName::Rerun => "icons/rerun.svg", - IconName::Return => "icons/return.svg", - IconName::Reveal => "icons/reveal.svg", - IconName::RotateCcw => "icons/rotate_ccw.svg", - IconName::RotateCw => "icons/rotate_cw.svg", - 
IconName::Route => "icons/route.svg", - IconName::Save => "icons/save.svg", - IconName::Screen => "icons/desktop.svg", - IconName::SearchSelection => "icons/search_selection.svg", - IconName::SearchCode => "icons/search_code.svg", - IconName::SelectAll => "icons/select_all.svg", - IconName::Server => "icons/server.svg", - IconName::Settings => "icons/file_icons/settings.svg", - IconName::Shift => "icons/shift.svg", - IconName::Slash => "icons/slash.svg", - IconName::SlashSquare => "icons/slash_square.svg", - IconName::Sliders => "icons/sliders.svg", - IconName::SlidersAlt => "icons/sliders-alt.svg", - IconName::Snip => "icons/snip.svg", - IconName::Space => "icons/space.svg", - IconName::Sparkle => "icons/sparkle.svg", - IconName::SparkleAlt => "icons/sparkle_alt.svg", - IconName::SparkleFilled => "icons/sparkle_filled.svg", - IconName::Spinner => "icons/spinner.svg", - IconName::Split => "icons/split.svg", - IconName::Star => "icons/star.svg", - IconName::StarFilled => "icons/star_filled.svg", - IconName::Stop => "icons/stop.svg", - IconName::Strikethrough => "icons/strikethrough.svg", - IconName::Supermaven => "icons/supermaven.svg", - IconName::SupermavenDisabled => "icons/supermaven_disabled.svg", - IconName::SupermavenError => "icons/supermaven_error.svg", - IconName::SupermavenInit => "icons/supermaven_init.svg", - IconName::Tab => "icons/tab.svg", - IconName::Terminal => "icons/terminal.svg", - IconName::TextCursor => "icons/text-cursor.svg", - IconName::TextSelect => "icons/text_select.svg", - IconName::Trash => "icons/trash.svg", - IconName::TriangleRight => "icons/triangle_right.svg", - IconName::Unpin => "icons/unpin.svg", - IconName::Update => "icons/update.svg", - IconName::Undo => "icons/undo.svg", - IconName::WholeWord => "icons/word_search.svg", - IconName::XCircle => "icons/error.svg", - IconName::ZedAssistant => "icons/zed_assistant.svg", - IconName::ZedAssistantFilled => "icons/zed_assistant_filled.svg", - IconName::Visible => 
"icons/visible.svg", - } - } } #[derive(IntoElement)] diff --git a/crates/ui/src/components/stories/list_header.rs b/crates/ui/src/components/stories/list_header.rs index 358dc26a875c67..afcae0215abffd 100644 --- a/crates/ui/src/components/stories/list_header.rs +++ b/crates/ui/src/components/stories/list_header.rs @@ -24,7 +24,7 @@ impl Render for ListHeaderStory { .child( ListHeader::new("Section 4") .end_slot(IconButton::new("action_1", IconName::Bolt)) - .end_slot(IconButton::new("action_2", IconName::ExclamationTriangle)) + .end_slot(IconButton::new("action_2", IconName::Warning)) .end_slot(IconButton::new("action_3", IconName::Plus)), ) } diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index 2f8b14ddb725ac..ffab276dd1aedb 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -311,15 +311,11 @@ impl Render for LanguageServerPrompt { .mt(px(-2.0)) .map(|icon| { if severity == DiagnosticSeverity::ERROR { - icon.path( - IconName::ExclamationTriangle.path(), - ) - .text_color(Color::Error.color(cx)) + icon.path(IconName::Warning.path()) + .text_color(Color::Error.color(cx)) } else { - icon.path( - IconName::ExclamationTriangle.path(), - ) - .text_color(Color::Warning.color(cx)) + icon.path(IconName::Warning.path()) + .text_color(Color::Warning.color(cx)) } }) }), @@ -421,7 +417,7 @@ impl Render for ErrorMessagePrompt { .mr_2() .mt(px(-2.0)) .map(|icon| { - icon.path(IconName::ExclamationTriangle.path()) + icon.path(IconName::Warning.path()) .text_color(Color::Error.color(cx)) }), ) From 4d8c3855c229d5aff02d39dd3eac10cf2007585c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sat, 14 Sep 2024 15:44:54 -0400 Subject: [PATCH 077/762] theme: Clamp font sizes between 6px and 100px (#17829) This PR clamps the `ui_font_size` and `buffer_font_size` settings between 6px and 100px. 
Release Notes: - Changed `ui_font_size` and `buffer_font_size` to require values to be between 6px and 100px (inclusive). --- crates/theme/src/settings.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 5b88d5fcb5ba29..4d0b4f0215aada 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -609,10 +609,14 @@ impl settings::Settings for ThemeSettings { this.apply_theme_overrides(); merge(&mut this.ui_font_size, value.ui_font_size.map(Into::into)); + this.ui_font_size = this.ui_font_size.clamp(px(6.), px(100.)); + merge( &mut this.buffer_font_size, value.buffer_font_size.map(Into::into), ); + this.buffer_font_size = this.buffer_font_size.clamp(px(6.), px(100.)); + merge(&mut this.buffer_line_height, value.buffer_line_height); // Clamp the `unnecessary_code_fade` to ensure text can't disappear entirely. From 00c0a7254a85c1728f8fdca06bce839360139db5 Mon Sep 17 00:00:00 2001 From: Zhang <17492978+zhang0098@users.noreply.github.com> Date: Sun, 15 Sep 2024 03:49:53 +0800 Subject: [PATCH 078/762] gpui: Allow TextInput example to lose and gain focus (#17823) Improved the input.rs example file in gpui crate. The new code * allow this text field to lose and gain input focus. * change TextInput's height from full to fix. 
Release Notes: - N/A --- crates/gpui/examples/input.rs | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/crates/gpui/examples/input.rs b/crates/gpui/examples/input.rs index cdce4c61c744c9..7e7de269b11462 100644 --- a/crates/gpui/examples/input.rs +++ b/crates/gpui/examples/input.rs @@ -467,9 +467,12 @@ impl Element for TextElement { let line = prepaint.line.take().unwrap(); line.paint(bounds.origin, cx.line_height(), cx).unwrap(); - if let Some(cursor) = prepaint.cursor.take() { - cx.paint_quad(cursor); + if focus_handle.is_focused(cx) { + if let Some(cursor) = prepaint.cursor.take() { + cx.paint_quad(cursor); + } } + self.input.update(cx, |input, _cx| { input.last_layout = Some(line); input.last_bounds = Some(bounds); @@ -499,7 +502,6 @@ impl Render for TextInput { .on_mouse_up_out(MouseButton::Left, cx.listener(Self::on_mouse_up)) .on_mouse_move(cx.listener(Self::on_mouse_move)) .bg(rgb(0xeeeeee)) - .size_full() .line_height(px(30.)) .text_size(px(24.)) .child( @@ -524,6 +526,13 @@ impl FocusableView for TextInput { struct InputExample { text_input: View, recent_keystrokes: Vec, + focus_handle: FocusHandle, +} + +impl FocusableView for InputExample { + fn focus_handle(&self, _: &AppContext) -> FocusHandle { + self.focus_handle.clone() + } } impl InputExample { @@ -540,6 +549,7 @@ impl Render for InputExample { let num_keystrokes = self.recent_keystrokes.len(); div() .bg(rgb(0xaaaaaa)) + .track_focus(&self.focus_handle) .flex() .flex_col() .size_full() @@ -615,9 +625,10 @@ fn main() { last_bounds: None, is_selecting: false, }); - cx.new_view(|_| InputExample { + cx.new_view(|cx| InputExample { text_input, recent_keystrokes: vec![], + focus_handle: cx.focus_handle(), }) }, ) From 40a00fb2249fd9d359eb26a61e2af73950cd8f70 Mon Sep 17 00:00:00 2001 From: krizej <60076189+krizej@users.noreply.github.com> Date: Sat, 14 Sep 2024 22:06:03 +0200 Subject: [PATCH 079/762] Add missing operators and keywords to the C syntax highlighting 
(#17541) Based on https://en.cppreference.com/w/c/language/expressions#Operators Release Notes: - Added missing operators and keywords to the C syntax highlighting --- crates/languages/src/c/highlights.scm | 40 +++++++++++++++++++++------ 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/crates/languages/src/c/highlights.scm b/crates/languages/src/c/highlights.scm index 0a8c12f06ff0c6..634f8d81c4ad93 100644 --- a/crates/languages/src/c/highlights.scm +++ b/crates/languages/src/c/highlights.scm @@ -9,6 +9,7 @@ "enum" "extern" "for" + "goto" "if" "inline" "return" @@ -35,27 +36,48 @@ ] @keyword [ + "=" + "+=" + "-=" + "*=" + "/=" + "%=" + "&=" + "|=" + "^=" + "<<=" + ">>=" + "++" "--" + "+" "-" - "-=" - "->" - "=" - "!=" "*" + "/" + "%" + "~" "&" + "|" + "^" + "<<" + ">>" + "!" "&&" - "+" - "++" - "+=" - "<" + "||" "==" + "!=" + "<" ">" - "||" + "<=" + ">=" + "->" + "?" + ":" ] @operator [ "." ";" + "," ] @punctuation.delimiter [ From d5268c519721241be876605dc37294edf7eed4a1 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Sun, 15 Sep 2024 04:17:38 +0800 Subject: [PATCH 080/762] docs: Add proxy settings (#17797) I'm not sure if I placed `Network Proxy` in the correct position. What I noticed is that the first half of the documentation seems to be organized alphabetically, but the second half is not. I tried to position `Network Proxy` in a spot that seemed reasonable while maintaining alphabetical order. If there's a better suggestion, I'd be happy to make adjustments. 
Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- docs/src/configuring-zed.md | 39 +++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 82f5a244845b57..1c4aee533d176a 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1061,6 +1061,45 @@ The following settings can be overridden for each specific language: These values take in the same options as the root-level settings with the same name. +## Network Proxy + +- Description: Configure a network proxy for Zed. +- Setting: `proxy` +- Default: `null` + +**Options** + +The proxy setting must contain a URL to the proxy. + +The following URI schemes are supported: + +- `http` +- `https` +- `socks4` +- `socks4a` +- `socks5` +- `socks5h` + +`http` will be used when no scheme is specified. + +By default no proxy will be used, or Zed will attempt to retrieve proxy settings from environment variables, such as `http_proxy`, `HTTP_PROXY`, `https_proxy`, `HTTPS_PROXY`, `all_proxy`, `ALL_PROXY`. + +For example, to set an `http` proxy, add the following to your settings: + +```json +{ + "proxy": "http://127.0.0.1:10809" +} +``` + +Or to set a `socks5` proxy: + +```json +{ + "proxy": "socks5://localhost:10808" +} +``` + ## Preview tabs - Description: From d56fa258304dacc3a18fb4da94f130047ffdb691 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sat, 14 Sep 2024 17:00:37 -0400 Subject: [PATCH 081/762] context_servers: Hide actions when no context servers are configured (#17833) This PR filters out the context servers actions from the command palette when no context servers are configured. 
Release Notes: - N/A --- Cargo.lock | 1 + crates/context_servers/Cargo.toml | 1 + crates/context_servers/src/context_servers.rs | 3 ++ crates/context_servers/src/manager.rs | 50 +++++++++++++------ 4 files changed, 39 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 79f4e803a8461a..4cb7a85e359c6e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2780,6 +2780,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", + "command_palette_hooks", "futures 0.3.30", "gpui", "log", diff --git a/crates/context_servers/Cargo.toml b/crates/context_servers/Cargo.toml index 21bf6a1fc86166..9c0336f1217ef0 100644 --- a/crates/context_servers/Cargo.toml +++ b/crates/context_servers/Cargo.toml @@ -14,6 +14,7 @@ path = "src/context_servers.rs" [dependencies] anyhow.workspace = true collections.workspace = true +command_palette_hooks.workspace = true futures.workspace = true gpui.workspace = true log.workspace = true diff --git a/crates/context_servers/src/context_servers.rs b/crates/context_servers/src/context_servers.rs index 3333f95f9fe7aa..55634bb77cc944 100644 --- a/crates/context_servers/src/context_servers.rs +++ b/crates/context_servers/src/context_servers.rs @@ -12,6 +12,9 @@ pub use registry::*; actions!(context_servers, [Restart]); +/// The namespace for the context servers actions. +const CONTEXT_SERVERS_NAMESPACE: &'static str = "context_servers"; + pub fn init(cx: &mut AppContext) { log::info!("initializing context server client"); manager::init(cx); diff --git a/crates/context_servers/src/manager.rs b/crates/context_servers/src/manager.rs index 1596a54eb9bfc6..08e403a43442ea 100644 --- a/crates/context_servers/src/manager.rs +++ b/crates/context_servers/src/manager.rs @@ -15,6 +15,7 @@ //! and react to changes in settings. 
use collections::{HashMap, HashSet}; +use command_palette_hooks::CommandPaletteFilter; use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Task}; use log; use parking_lot::RwLock; @@ -24,6 +25,7 @@ use settings::{Settings, SettingsSources, SettingsStore}; use std::path::Path; use std::sync::Arc; +use crate::CONTEXT_SERVERS_NAMESPACE; use crate::{ client::{self, Client}, types, @@ -148,26 +150,28 @@ impl ContextServerManager { cx: &mut ModelContext, ) -> Task> { let server_id = config.id.clone(); - let server_id2 = config.id.clone(); if self.servers.contains_key(&server_id) || self.pending_servers.contains(&server_id) { return Task::ready(Ok(())); } - let task = cx.spawn(|this, mut cx| async move { - let server = Arc::new(ContextServer::new(config)); - server.start(&cx).await?; - this.update(&mut cx, |this, cx| { - this.servers.insert(server_id.clone(), server); - this.pending_servers.remove(&server_id); - cx.emit(Event::ServerStarted { - server_id: server_id.clone(), - }); - })?; - Ok(()) - }); + let task = { + let server_id = server_id.clone(); + cx.spawn(|this, mut cx| async move { + let server = Arc::new(ContextServer::new(config)); + server.start(&cx).await?; + this.update(&mut cx, |this, cx| { + this.servers.insert(server_id.clone(), server); + this.pending_servers.remove(&server_id); + cx.emit(Event::ServerStarted { + server_id: server_id.clone(), + }); + })?; + Ok(()) + }) + }; - self.pending_servers.insert(server_id2); + self.pending_servers.insert(server_id); task } @@ -243,15 +247,20 @@ impl GlobalContextServerManager { pub fn init(cx: &mut AppContext) { ContextServerSettings::register(cx); GlobalContextServerManager::register(cx); + + CommandPaletteFilter::update_global(cx, |filter, _cx| { + filter.hide_namespace(CONTEXT_SERVERS_NAMESPACE); + }); + cx.observe_global::(|cx| { let manager = ContextServerManager::global(cx); cx.update_model(&manager, |manager, cx| { let settings = 
ContextServerSettings::get_global(cx); - let current_servers: HashMap = manager + let current_servers = manager .servers() .into_iter() .map(|server| (server.id.clone(), server.config.clone())) - .collect(); + .collect::>(); let new_servers = settings .servers @@ -279,6 +288,15 @@ pub fn init(cx: &mut AppContext) { for id in servers_to_remove { manager.remove_server(&id, cx).detach_and_log_err(cx); } + + let has_any_context_servers = !manager.servers().is_empty(); + CommandPaletteFilter::update_global(cx, |filter, _cx| { + if has_any_context_servers { + filter.show_namespace(CONTEXT_SERVERS_NAMESPACE); + } else { + filter.hide_namespace(CONTEXT_SERVERS_NAMESPACE); + } + }); }) }) .detach(); From 6f337de440687bdc233c4e6f845f88d44a8f9740 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sat, 14 Sep 2024 17:26:56 -0400 Subject: [PATCH 082/762] ui: Clean up doc comments for `Vector` (#17834) This PR cleans up the doc comments for the `Vector` component. Release Notes: - N/A --- crates/ui/src/components/image.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/ui/src/components/image.rs b/crates/ui/src/components/image.rs index 286fe7f56f3e31..e7eefe5fea3100 100644 --- a/crates/ui/src/components/image.rs +++ b/crates/ui/src/components/image.rs @@ -27,9 +27,9 @@ pub enum VectorName { /// A vector image, such as an SVG. /// -/// A [Vector] is different from an [Icon] in that it is intended +/// A [`Vector`] is different from an [`Icon`] in that it is intended /// to be displayed at a specific size, or series of sizes, rather -/// than conforming to the standard size of an icons. +/// than conforming to the standard size of an icon. #[derive(IntoElement)] pub struct Vector { path: &'static str, @@ -38,7 +38,7 @@ pub struct Vector { } impl Vector { - /// Create a new [Vector] image with the given [VectorName] and size. + /// Creates a new [`Vector`] image with the given [`VectorName`] and size. 
pub fn new(vector: VectorName, width: Rems, height: Rems) -> Self { Self { path: vector.path(), @@ -47,18 +47,18 @@ impl Vector { } } - /// Create a new [Vector] image where the width and height are the same. + /// Creates a new [`Vector`] image where the width and height are the same. pub fn square(vector: VectorName, size: Rems) -> Self { Self::new(vector, size, size) } - /// Set the image color + /// Sets the vector color. pub fn color(mut self, color: Color) -> Self { self.color = color; self } - /// Set the image size + /// Sets the vector size. pub fn size(mut self, size: impl Into>) -> Self { let size = size.into(); From 4f251429c7e9596641d6716479fbd7a931245197 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Sun, 15 Sep 2024 19:45:06 -0400 Subject: [PATCH 083/762] Add perplexity extension readme (#17861) Release Notes: - N/A --- extensions/perplexity/README.md | 41 +++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 extensions/perplexity/README.md diff --git a/extensions/perplexity/README.md b/extensions/perplexity/README.md new file mode 100644 index 00000000000000..094a876885e97f --- /dev/null +++ b/extensions/perplexity/README.md @@ -0,0 +1,41 @@ +# Zed Perplexity Extension + +This example extension adds the `/perplexity` [slash command](https://zed.dev/docs/assistant/commands) to the Zed AI assistant. + +## Usage + +Open the AI Assistant panel (`cmd-r` or `ctrl-r`) and enter: + +``` +/perplexity What's the weather in Boulder, CO tomorrow evening? +``` + +## Development Setup + +1. Install the rust toolchain and clone the zed repo: +``` +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh + +mkdir -p ~/code +cd ~/code +git clone https://github.com/zed-industries/zed +``` + +2. Launch Zed and Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) +3. Open Zed +4. Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) +5. Click "Install Dev Extension" +6. 
Navigate to the "extensions/perplexity" folder inside the zed git repo. +7. Ensure your `PERPLEXITY_API_KEY` environment variable is set (instructions below) ```sh env |grep PERPLEXITY_API_KEY ``` +8. Quit and relaunch Zed ## PERPLEXITY_API_KEY This extension requires a Perplexity API key to be available via the `PERPLEXITY_API_KEY` environment variable. To obtain a Perplexity.ai API token, login to your Perplexity.ai account and go to [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently the cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD. Take your API key and add it to your environment by adding `export PERPLEXITY_API_KEY="pplx-0123456789abcdef..."` to your `~/.zshrc` or `~/.bashrc`. Close and reopen your terminal session. Check with `env |grep PERPLEXITY_API_KEY`. From 524a1a6fec96d1824a2999c068df6ccc84956553 Mon Sep 17 00:00:00 2001 From: Chris Veness Date: Mon, 16 Sep 2024 00:45:48 +0100 Subject: [PATCH 084/762] Note in initial_user_settings.json how to access the command palette (#17854) Newcomers might not know / remember how to access the command palette.
Release Notes: - N/A --- assets/settings/initial_user_settings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/settings/initial_user_settings.json b/assets/settings/initial_user_settings.json index d8ac1a00216834..71f3beb1d6076e 100644 --- a/assets/settings/initial_user_settings.json +++ b/assets/settings/initial_user_settings.json @@ -5,7 +5,7 @@ // // To see all of Zed's default settings without changing your // custom settings, run `zed: open default settings` from the -// command palette +// command palette (cmd-shift-p / ctrl-shift-p) { "ui_font_size": 16, "buffer_font_size": 16, From cdc3791544bbba0ee36c7b85b51f3df502511249 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Mon, 16 Sep 2024 00:24:18 -0400 Subject: [PATCH 085/762] Fix incorrect icons (#17856) --- assets/icons/cursor_i_beam.svg | 2 +- assets/icons/cursor_text.svg | 1 - assets/icons/text_snippet.svg | 1 + crates/assistant/src/assistant_panel.rs | 2 +- crates/ui/src/components/icon.rs | 2 +- 5 files changed, 4 insertions(+), 4 deletions(-) delete mode 100644 assets/icons/cursor_text.svg create mode 100644 assets/icons/text_snippet.svg diff --git a/assets/icons/cursor_i_beam.svg b/assets/icons/cursor_i_beam.svg index 255635de6a049d..2e7b95b2039455 100644 --- a/assets/icons/cursor_i_beam.svg +++ b/assets/icons/cursor_i_beam.svg @@ -1 +1 @@ - + diff --git a/assets/icons/cursor_text.svg b/assets/icons/cursor_text.svg deleted file mode 100644 index 2e7b95b2039455..00000000000000 --- a/assets/icons/cursor_text.svg +++ /dev/null @@ -1 +0,0 @@ - diff --git a/assets/icons/text_snippet.svg b/assets/icons/text_snippet.svg new file mode 100644 index 00000000000000..255635de6a049d --- /dev/null +++ b/assets/icons/text_snippet.svg @@ -0,0 +1 @@ + diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index af3abec67c8100..8d158193a60f2b 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ 
-5235,7 +5235,7 @@ fn quote_selection_fold_placeholder(title: String, editor: WeakView) -> ButtonLike::new(fold_id) .style(ButtonStyle::Filled) .layer(ElevationIndex::ElevatedSurface) - .child(Icon::new(IconName::CursorIBeam)) + .child(Icon::new(IconName::TextSnippet)) .child(Label::new(title.clone()).single_line()) .on_click(move |_, cx| { editor diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index c2743ecbd9bd72..a2f70244db2690 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -165,7 +165,7 @@ pub enum IconName { Copy, CountdownTimer, CursorIBeam, - CursorText, + TextSnippet, Dash, DatabaseZap, Delete, From 29a5def12ce233f5e98454f60af5ee4af20f25e5 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 16 Sep 2024 07:12:07 -0400 Subject: [PATCH 086/762] Refine assistant config UI (#17871) This PR does a little bit of a touch-up on the copywriting on the assistant config UI. I had friends reporting to me that some of the writing could be clearer, and hopefully, this goes into that direction! 
Release Notes: - N/A --- assets/icons/sliders_alt.svg | 6 ++++++ assets/icons/sliders_vertical.svg | 11 +++++++++++ crates/assistant/src/assistant_panel.rs | 4 +++- .../language_model/src/provider/anthropic.rs | 10 ++++------ .../src/provider/copilot_chat.rs | 2 +- crates/language_model/src/provider/google.rs | 10 ++++------ crates/language_model/src/provider/open_ai.rs | 18 +++++++++++------- crates/ui/src/components/icon.rs | 2 ++ 8 files changed, 42 insertions(+), 21 deletions(-) create mode 100644 assets/icons/sliders_alt.svg create mode 100644 assets/icons/sliders_vertical.svg diff --git a/assets/icons/sliders_alt.svg b/assets/icons/sliders_alt.svg new file mode 100644 index 00000000000000..36c3feccfede20 --- /dev/null +++ b/assets/icons/sliders_alt.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/sliders_vertical.svg b/assets/icons/sliders_vertical.svg new file mode 100644 index 00000000000000..ab61037a513e8c --- /dev/null +++ b/assets/icons/sliders_vertical.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 8d158193a60f2b..59f5e81d054c3f 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -4117,9 +4117,11 @@ impl ContextEditor { .child(Label::new(label)), ) .child( - Button::new("open-configuration", "Open configuration") + Button::new("open-configuration", "Configure Providers") .size(ButtonSize::Compact) + .icon(Some(IconName::SlidersVertical)) .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) .style(ButtonStyle::Filled) .on_click({ let focus_handle = self.focus_handle(cx).clone(); diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index 9f7135aef7d73f..1e3d2750949f16 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -657,11 +657,10 @@ impl ConfigurationView { impl 
Render for ConfigurationView { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { const ANTHROPIC_CONSOLE_URL: &str = "https://console.anthropic.com/settings/keys"; - const INSTRUCTIONS: [&str; 4] = [ - "To use the assistant panel or inline assistant, you need to add your Anthropic API key.", - "You can create an API key at:", - "", - "Paste your Anthropic API key below and hit enter to use the assistant:", + const INSTRUCTIONS: [&str; 3] = [ + "To use Zed's assistant with Anthropic, you need to add an API key. Follow these steps:", + "- Create one at:", + "- Paste your API key below and hit enter to use the assistant:", ]; let env_var_set = self.state.read(cx).api_key_from_env; @@ -682,7 +681,6 @@ impl Render for ConfigurationView { ) ) .child(Label::new(INSTRUCTIONS[2])) - .child(Label::new(INSTRUCTIONS[3])) .child( h_flex() .w_full() diff --git a/crates/language_model/src/provider/copilot_chat.rs b/crates/language_model/src/provider/copilot_chat.rs index e21060e54d1e6e..58b486921ab811 100644 --- a/crates/language_model/src/provider/copilot_chat.rs +++ b/crates/language_model/src/provider/copilot_chat.rs @@ -358,7 +358,7 @@ impl Render for ConfigurationView { } _ => { const LABEL: &str = - "To use the assistant panel or inline assistant, you must login to GitHub Copilot. Your GitHub account must have an active Copilot Chat subscription."; + "To use Zed's assistant with GitHub Copilot, you need to be logged in to GitHub. 
Note that your GitHub account must have an active Copilot Chat subscription."; v_flex().gap_6().child(Label::new(LABEL)).child( v_flex() .gap_2() diff --git a/crates/language_model/src/provider/google.rs b/crates/language_model/src/provider/google.rs index 005f35ff8b1bfe..daa07cdc3977fe 100644 --- a/crates/language_model/src/provider/google.rs +++ b/crates/language_model/src/provider/google.rs @@ -446,11 +446,10 @@ impl ConfigurationView { impl Render for ConfigurationView { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { const GOOGLE_CONSOLE_URL: &str = "https://aistudio.google.com/app/apikey"; - const INSTRUCTIONS: [&str; 4] = [ - "To use the Google AI assistant, you need to add your Google AI API key.", - "You can create an API key at:", - "", - "Paste your Google AI API key below and hit enter to use the assistant:", + const INSTRUCTIONS: [&str; 3] = [ + "To use Zed's assistant with Google AI, you need to add an API key. Follow these steps:", + "- Create one by visiting:", + "- Paste your API key below and hit enter to use the assistant", ]; let env_var_set = self.state.read(cx).api_key_from_env; @@ -472,7 +471,6 @@ impl Render for ConfigurationView { ) ) .child(Label::new(INSTRUCTIONS[2])) - .child(Label::new(INSTRUCTIONS[3])) .child( h_flex() .w_full() diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs index 222c1530412aab..a4efb3baf021ef 100644 --- a/crates/language_model/src/provider/open_ai.rs +++ b/crates/language_model/src/provider/open_ai.rs @@ -497,13 +497,11 @@ impl ConfigurationView { impl Render for ConfigurationView { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { const OPENAI_CONSOLE_URL: &str = "https://platform.openai.com/api-keys"; - const INSTRUCTIONS: [&str; 6] = [ - "To use the assistant panel or inline assistant, you need to add your OpenAI API key.", - " - You can create an API key at: ", - " - Make sure your OpenAI account has credits", - " - Having 
a subscription for another service like GitHub Copilot won't work.", - "", - "Paste your OpenAI API key below and hit enter to use the assistant:", + const INSTRUCTIONS: [&str; 4] = [ + "To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:", + " - Create one by visiting:", + " - Ensure your OpenAI account has credits", + " - Paste your API key below and hit enter to start using the assistant", ]; let env_var_set = self.state.read(cx).api_key_from_env; @@ -543,6 +541,12 @@ impl Render for ConfigurationView { ) .size(LabelSize::Small), ) + .child( + Label::new( + "Note that having a subscription for another service like GitHub Copilot won't work.".to_string(), + ) + .size(LabelSize::Small), + ) .into_any() } else { h_flex() diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index a2f70244db2690..a71c3e98720917 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -255,6 +255,7 @@ pub enum IconName { Slash, SlashSquare, Sliders, + SlidersVertical, Snip, Space, Sparkle, @@ -284,6 +285,7 @@ pub enum IconName { XCircle, ZedAssistant, ZedAssistantFilled, + ZedXCopilot, } #[derive(IntoElement)] From 96a5daaf3f5a4424840705ba282c11ed5c4e8e77 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 16 Sep 2024 07:12:24 -0400 Subject: [PATCH 087/762] Refine symbolic link project tooltip (#17869) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR uses the tooltip with meta to display the info that a project panel item is actually a symbolic link. 
| Before | After | |--------|--------| | Screenshot 2024-09-16 at 11 20 15 AM | Screenshot 2024-09-16 at 11 19 12 AM | Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c8e1ce28eb5938..cd4196dbc67b15 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2298,7 +2298,7 @@ impl ProjectPanel { .id("symlink_icon") .pr_3() .tooltip(move |cx| { - Tooltip::text(format!("{path} • Symbolic Link"), cx) + Tooltip::with_meta(path.to_string(), None, "Symbolic Link", cx) }) .child( Icon::new(IconName::ArrowUpRight) @@ -2767,7 +2767,6 @@ impl Render for ProjectPanel { .track_focus(&self.focus_handle) .child( Button::new("open_project", "Open a project") - .style(ButtonStyle::Filled) .full_width() .key_binding(KeyBinding::for_action(&workspace::Open, cx)) .on_click(cx.listener(|this, _, cx| { From 4e1bb68620f746a4683af4c335acdd207775a017 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 16 Sep 2024 08:28:27 -0400 Subject: [PATCH 088/762] Use buffer font for the inline assistant (#17875) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/zed-industries/zed/issues/17738 This PR is motivated by having also noticed what the issue above mentions. Looked it further and it does seem the inline assistant had a slightly bigger font-size even though it was using the configured UI font-size as per https://github.com/zed-industries/zed/pull/17542. I'm not sure why that was, technically speaking. However, I ended up realizing that, given we're within the editor, it'd make more sense to use the buffer font instead. With this change, the size mismatch seems to be gone. 
Screenshot 2024-09-16 at 2 13 28 PM Release Notes: - N/A --- crates/assistant/src/inline_assistant.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 8b71e54746dad6..30a5c98a54372a 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1465,7 +1465,7 @@ impl Render for PromptEditor { .border_y_1() .border_color(cx.theme().status().info_border) .size_full() - .py(cx.line_height() / 2.) + .py(cx.line_height() / 2.5) .on_action(cx.listener(Self::confirm)) .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::move_up)) @@ -1918,12 +1918,11 @@ impl PromptEditor { } else { cx.theme().colors().text }, - font_family: settings.ui_font.family.clone(), - font_features: settings.ui_font.features.clone(), - font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: settings.ui_font_size.into(), - font_weight: settings.ui_font.weight, - line_height: relative(1.3), + font_family: settings.buffer_font.family.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size.into(), + font_weight: settings.buffer_font.weight, + line_height: relative(settings.buffer_line_height.value()), ..Default::default() }; EditorElement::new( From 02dfe08ce8c025a6475905d3f233a37b61ffd3cf Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Mon, 16 Sep 2024 08:29:46 -0400 Subject: [PATCH 089/762] Welcome tweaks (#17874) This PR adds "Open Settings" and "Open Docs" to the welcome page, as well as some minor design polish. The welcome page needs a full redesign at some point so I didn't too to much here in terms of structure/content. Before | After: ![CleanShot 2024-09-16 at 08 12 23@2x](https://github.com/user-attachments/assets/722175ec-d129-4060-827f-f02f572115da) --- Release Notes: - Improved welcome page design and added additional links. 
--- Cargo.lock | 1 + crates/welcome/Cargo.toml | 3 ++- crates/welcome/src/welcome.rs | 43 ++++++++++++++++++++++++----------- 3 files changed, 33 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4cb7a85e359c6e..b752fc82927666 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13273,6 +13273,7 @@ dependencies = [ "util", "vim", "workspace", + "zed_actions", ] [[package]] diff --git a/crates/welcome/Cargo.toml b/crates/welcome/Cargo.toml index e747072cdeba70..0db1af9252ebcd 100644 --- a/crates/welcome/Cargo.toml +++ b/crates/welcome/Cargo.toml @@ -17,11 +17,11 @@ test-support = [] [dependencies] anyhow.workspace = true client.workspace = true -inline_completion_button.workspace = true db.workspace = true extensions_ui.workspace = true fuzzy.workspace = true gpui.workspace = true +inline_completion_button.workspace = true install_cli.workspace = true picker.workspace = true project.workspace = true @@ -33,6 +33,7 @@ ui.workspace = true util.workspace = true vim.workspace = true workspace.workspace = true +zed_actions.workspace = true [dev-dependencies] editor = { workspace = true, features = ["test-support"] } diff --git a/crates/welcome/src/welcome.rs b/crates/welcome/src/welcome.rs index fc837c68671a31..1be2567c0af003 100644 --- a/crates/welcome/src/welcome.rs +++ b/crates/welcome/src/welcome.rs @@ -25,6 +25,7 @@ pub use multibuffer_hint::*; actions!(welcome, [ResetHints]); pub const FIRST_OPEN: &str = "first_open"; +pub const DOCS_URL: &str = "https://zed.dev/docs/"; pub fn init(cx: &mut AppContext) { BaseKeymap::register(cx); @@ -74,27 +75,22 @@ impl Render for WelcomePage { .track_focus(&self.focus_handle) .child( v_flex() - .w_96() - .gap_4() + .w_80() + .gap_6() .mx_auto() .child( svg() .path("icons/logo_96.svg") - .text_color(gpui::white()) - .w(px(96.)) - .h(px(96.)) + .text_color(cx.theme().colors().icon_disabled) + .w(px(80.)) + .h(px(80.)) .mx_auto(), ) - .child( - h_flex() - .justify_center() - .child(Label::new("Code at the speed of 
thought")), - ) .child( v_flex() .gap_2() .child( - Button::new("choose-theme", "Choose a theme") + Button::new("choose-theme", "Choose Theme") .full_width() .on_click(cx.listener(|this, _, cx| { this.telemetry.report_app_event( @@ -112,7 +108,7 @@ impl Render for WelcomePage { })), ) .child( - Button::new("choose-keymap", "Choose a keymap") + Button::new("choose-keymap", "Choose Keymap") .full_width() .on_click(cx.listener(|this, _, cx| { this.telemetry.report_app_event( @@ -129,6 +125,27 @@ impl Render for WelcomePage { .ok(); })), ) + .child( + Button::new("edit settings", "Edit Settings") + .full_width() + .on_click(cx.listener(|this, _, cx| { + this.telemetry.report_app_event( + "welcome page: edit settings".to_string(), + ); + cx.dispatch_action(Box::new(zed_actions::OpenSettings)); + })), + ) + .child(Button::new("view docs", "View Docs").full_width().on_click( + cx.listener(|this, _, cx| { + this.telemetry + .report_app_event("welcome page: view docs".to_string()); + cx.open_url(DOCS_URL); + }), + )), + ) + .child( + v_flex() + .gap_2() .when(cfg!(target_os = "macos"), |el| { el.child( Button::new("install-cli", "Install the CLI") @@ -304,7 +321,7 @@ impl Item for WelcomePage { type Event = ItemEvent; fn tab_content_text(&self, _cx: &WindowContext) -> Option { - Some("Welcome to Zed!".into()) + Some("Welcome".into()) } fn telemetry_event_text(&self) -> Option<&'static str> { From f986513d0dc0f5ddc88089229970afcbb3f4453b Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 16 Sep 2024 09:00:20 -0400 Subject: [PATCH 090/762] gpui: Remove debug print statement (#17878) Release Notes: - N/A --- crates/gpui/src/app.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ac7d5eb47bb99e..564b8934897734 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1504,9 +1504,3 @@ pub struct KeystrokeEvent { /// The action that was resolved for the keystroke, if any pub action: Option>, } - -impl 
Drop for AppContext { - fn drop(&mut self) { - println!("Dropping the App Context"); - } -} From e3d54b22118f54d13488707bc3cb9f0e9143b1c8 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 16 Sep 2024 09:01:20 -0400 Subject: [PATCH 091/762] vim: Fix ctrl-b not moving the cursor (#17808) Closes #17687 Release Notes: - Fixed `ctrl-b` not moving the cursor. --------- Co-authored-by: Abdelhakim Qbaich Co-authored-by: Pete LeVasseur --- crates/editor/src/scroll/scroll_amount.rs | 29 ++++++- crates/vim/src/normal/scroll.rs | 92 +++++++++++++++++++++-- crates/vim/test_data/test_ctrl_f_b.json | 24 ++++++ 3 files changed, 136 insertions(+), 9 deletions(-) create mode 100644 crates/vim/test_data/test_ctrl_f_b.json diff --git a/crates/editor/src/scroll/scroll_amount.rs b/crates/editor/src/scroll/scroll_amount.rs index d115be68a0d4d0..ee80b3d86f7aa9 100644 --- a/crates/editor/src/scroll/scroll_amount.rs +++ b/crates/editor/src/scroll/scroll_amount.rs @@ -1,6 +1,18 @@ use serde::Deserialize; use ui::{px, Pixels}; +#[derive(Debug)] +pub enum ScrollDirection { + Upwards, + Downwards, +} + +impl ScrollDirection { + pub fn is_upwards(&self) -> bool { + matches!(self, ScrollDirection::Upwards) + } +} + #[derive(Debug, Clone, PartialEq, Deserialize)] pub enum ScrollAmount { // Scroll N lines (positive is towards the end of the document) @@ -15,7 +27,7 @@ impl ScrollAmount { Self::Line(count) => *count, Self::Page(count) => { // for full pages subtract one to leave an anchor line - if count.abs() == 1.0 { + if self.is_full_page() { visible_line_count -= 1.0 } (visible_line_count * count).trunc() @@ -29,4 +41,19 @@ impl ScrollAmount { ScrollAmount::Page(x) => px(height.0 * x), } } + + pub fn is_full_page(&self) -> bool { + match self { + ScrollAmount::Page(count) if count.abs() == 1.0 => true, + _ => false, + } + } + + pub fn direction(&self) -> ScrollDirection { + match self { + Self::Line(amount) if amount.is_sign_positive() => ScrollDirection::Downwards, + Self::Page(amount) if 
amount.is_sign_positive() => ScrollDirection::Downwards, + _ => ScrollDirection::Upwards, + } + } } diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index f89faa3748372f..8d1443e6339028 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -73,14 +73,24 @@ fn scroll_editor( return; } - editor.scroll_screen(amount, cx); + let full_page_up = amount.is_full_page() && amount.direction().is_upwards(); + let amount = match (amount.is_full_page(), editor.visible_line_count()) { + (true, Some(visible_line_count)) => { + if amount.direction().is_upwards() { + ScrollAmount::Line(amount.lines(visible_line_count) + 1.0) + } else { + ScrollAmount::Line(amount.lines(visible_line_count) - 1.0) + } + } + _ => amount.clone(), + }; + + editor.scroll_screen(&amount, cx); if !should_move_cursor { return; } - let visible_line_count = if let Some(visible_line_count) = editor.visible_line_count() { - visible_line_count - } else { + let Some(visible_line_count) = editor.visible_line_count() else { return; }; @@ -115,11 +125,18 @@ fn scroll_editor( } else { DisplayRow(top.row().0 + vertical_scroll_margin) }; - let max_row = DisplayRow(map.max_point().row().0.max(top.row().0.saturating_add( - (visible_line_count as u32).saturating_sub(1 + vertical_scroll_margin), - ))); - let new_row = if head.row() < min_row { + let max_visible_row = top.row().0.saturating_add( + (visible_line_count as u32).saturating_sub(1 + vertical_scroll_margin), + ); + let max_row = DisplayRow(map.max_point().row().0.max(max_visible_row)); + + let new_row = if full_page_up { + // Special-casing ctrl-b/page-up, which is special-cased by Vim, it seems + // to always put the cursor on the last line of the page, even if the cursor + // was before that. 
+ DisplayRow(max_visible_row) + } else if head.row() < min_row { min_row } else if head.row() > max_row { max_row @@ -251,6 +268,7 @@ mod test { ) }); } + #[gpui::test] async fn test_ctrl_d_u(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; @@ -282,6 +300,64 @@ mod test { cx.shared_state().await.assert_matches(); } + #[gpui::test] + async fn test_ctrl_f_b(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + let visible_lines = 10; + cx.set_scroll_height(visible_lines).await; + + // First test without vertical scroll margin + cx.neovim.set_option(&format!("scrolloff={}", 0)).await; + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings::(cx, |s| { + s.vertical_scroll_margin = Some(0.0) + }); + }); + + let content = "ˇ".to_owned() + &sample_text(26, 2, 'a'); + cx.set_shared_state(&content).await; + + // scroll down: ctrl-f + cx.simulate_shared_keystrokes("ctrl-f").await; + cx.shared_state().await.assert_matches(); + + cx.simulate_shared_keystrokes("ctrl-f").await; + cx.shared_state().await.assert_matches(); + + // scroll up: ctrl-b + cx.simulate_shared_keystrokes("ctrl-b").await; + cx.shared_state().await.assert_matches(); + + cx.simulate_shared_keystrokes("ctrl-b").await; + cx.shared_state().await.assert_matches(); + + // Now go back to start of file, and test with vertical scroll margin + cx.simulate_shared_keystrokes("g g").await; + cx.shared_state().await.assert_matches(); + + cx.neovim.set_option(&format!("scrolloff={}", 3)).await; + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings::(cx, |s| { + s.vertical_scroll_margin = Some(3.0) + }); + }); + + // scroll down: ctrl-f + cx.simulate_shared_keystrokes("ctrl-f").await; + cx.shared_state().await.assert_matches(); + + cx.simulate_shared_keystrokes("ctrl-f").await; + cx.shared_state().await.assert_matches(); + + // scroll up: ctrl-b + cx.simulate_shared_keystrokes("ctrl-b").await; + 
cx.shared_state().await.assert_matches(); + + cx.simulate_shared_keystrokes("ctrl-b").await; + cx.shared_state().await.assert_matches(); + } + #[gpui::test] async fn test_scroll_beyond_last_line(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/test_data/test_ctrl_f_b.json b/crates/vim/test_data/test_ctrl_f_b.json new file mode 100644 index 00000000000000..19c94d8b6e94b2 --- /dev/null +++ b/crates/vim/test_data/test_ctrl_f_b.json @@ -0,0 +1,24 @@ +{"SetOption":{"value":"scrolloff=3"}} +{"SetOption":{"value":"lines=12"}} +{"SetOption":{"value":"scrolloff=0"}} +{"Put":{"state":"ˇaa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz"}} +{"Key":"ctrl-f"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nˇii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-f"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nˇqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-b"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nˇrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-b"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\nˇjj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"g"} +{"Key":"g"} +{"Get":{"state":"ˇaa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"SetOption":{"value":"scrolloff=3"}} +{"Key":"ctrl-f"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nˇll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-f"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\nˇtt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-b"} 
+{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\ngg\nhh\nii\njj\nkk\nll\nmm\nnn\nˇoo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} +{"Key":"ctrl-b"} +{"Get":{"state":"aa\nbb\ncc\ndd\nee\nff\nˇgg\nhh\nii\njj\nkk\nll\nmm\nnn\noo\npp\nqq\nrr\nss\ntt\nuu\nvv\nww\nxx\nyy\nzz","mode":"Normal"}} From 2baa704af7e6150e99da63d19a9fa42223e8acce Mon Sep 17 00:00:00 2001 From: ihavecoke Date: Mon, 16 Sep 2024 22:29:42 +0800 Subject: [PATCH 092/762] Clamp `tab_size` setting between 1 and 16 (#17882) Release Notes: - Changed `tab_size` setting to require values be between 1 and 16 (inclusive). ### jetbrains settings #### Max value ![image](https://github.com/user-attachments/assets/54b772fd-e670-4d77-b3e9-757b08659f55) When the maximum value is exceeded, the configuration box turns red to remind the user ![image](https://github.com/user-attachments/assets/fcdb6313-be97-4528-b654-5900bcaeafec) If the user still saves, jetbrains does not process it and resets it to the system default value of 4 image Without restrictions, I feel not good. 
Here is a random setting of a relatively large value https://github.com/user-attachments/assets/c3bdf262-ba08-4bc2-996a-5ad2a37c567f --------- Co-authored-by: Marshall Bowers --- crates/language/src/language_settings.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index e1fcaaba28b4f1..b465173cee1e4d 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -1027,6 +1027,10 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent } merge(&mut settings.tab_size, src.tab_size); + settings.tab_size = settings + .tab_size + .clamp(NonZeroU32::new(1).unwrap(), NonZeroU32::new(16).unwrap()); + merge(&mut settings.hard_tabs, src.hard_tabs); merge(&mut settings.soft_wrap, src.soft_wrap); merge(&mut settings.use_autoclose, src.use_autoclose); From 2cae6f3e088b1031a0a3ffe304613b1aaed454d1 Mon Sep 17 00:00:00 2001 From: Yohanes Bandung Bondowoso Date: Mon, 16 Sep 2024 22:03:51 +0700 Subject: [PATCH 093/762] dart: Respect LSP binary settings (#17494) Enable configuring Dart's LSP from other means of installation types. Some users don't install the `dart` binary, but uses version manager. In the example, I uses [FVM](https://fvm.app/) (short for "Flutter Version Manager"). I have tested this with "Install Dev Extensions". 
Release Notes: - N/A cc other maintainer: @agent3bood @flo80 --------- Co-authored-by: Marshall Bowers --- docs/src/languages/dart.md | 17 ++++++++++++ extensions/dart/extension.toml | 2 +- extensions/dart/src/dart.rs | 51 ++++++++++++++++++++++++++++++---- 3 files changed, 63 insertions(+), 7 deletions(-) diff --git a/docs/src/languages/dart.md b/docs/src/languages/dart.md index 6f80857c88cc90..6571166b6d3061 100644 --- a/docs/src/languages/dart.md +++ b/docs/src/languages/dart.md @@ -5,6 +5,23 @@ Dart support is available through the [Dart extension](https://github.com/zed-in - Tree Sitter: [UserNobody14/tree-sitter-dart](https://github.com/UserNobody14/tree-sitter-dart) - Language Server: [dart language-server](https://github.com/dart-lang/sdk) +## Configuration + +The `dart` binary can be configured in a Zed settings file with: + +```json +{ + "lsp": { + "dart": { + "binary": { + "path": "/opt/homebrew/bin/fvm", + "arguments": ["dart", "language-server", "--protocol=lsp"] + } + } + } +} +``` + If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 5 +++-- crates/audio/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 111360b96547bd..b31beee09c62d7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9278,12 +9278,13 @@ dependencies = [ [[package]] name = "rodio" -version = "0.17.3" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b1bb7b48ee48471f55da122c0044fcc7600cfcc85db88240b89cb832935e611" +checksum = "6006a627c1a38d37f3d3a85c6575418cfe34a5392d60a686d0071e1c8d427acb" dependencies = [ "cpal", "hound", + "thiserror", ] [[package]] diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml index bfe22de1f07482..9502b58f93274e 100644 --- a/crates/audio/Cargo.toml +++ b/crates/audio/Cargo.toml @@ -18,5 +18,5 @@ collections.workspace = true 
derive_more.workspace = true gpui.workspace = true parking_lot.workspace = true -rodio = { version = "0.17.1", default-features = false, features = ["wav"] } +rodio = { version = "0.19.0", default-features = false, features = ["wav"] } util.workspace = true From 7d97855ed7fd7da2626a90baf9a6e7cba4f11439 Mon Sep 17 00:00:00 2001 From: Galen Elias Date: Mon, 16 Sep 2024 21:23:03 -0700 Subject: [PATCH 121/762] Use AppContext for UI font adjustments (#17858) Appologies if this PR is off base, I'm still not super familiar with the Zed codebase. I was trying to integrate with https://github.com/zed-industries/zed/pull/12940 and found it awkward to hook up global bindings to adjust the UI font size due to the fact it takes a WindowContext. Looking at the API, it seemed odd that it took a WindowContext, yet the editor font methods take an AppContext. I couldn't find a compelling reason for this to be tied to a WindowContext, so I personally think it makes sense to switch it. This does have a behavior change, which hopefully is actually desirable: Currently, if you have two open and visible Zed windows, and trigger a UI font adjustment in one, the non-active windows won't update. However, once you switch focus to the second one it will snap to the new UI font size. This is inconsistent with adjusting the editor font size, which applies to all open windows immediately. 
Release Notes: - N/A --- crates/theme/src/settings.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 4d0b4f0215aada..7fa9a870de559b 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -491,13 +491,13 @@ pub fn setup_ui_font(cx: &mut WindowContext) -> gpui::Font { ui_font } -pub fn get_ui_font_size(cx: &WindowContext) -> Pixels { +pub fn get_ui_font_size(cx: &AppContext) -> Pixels { let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; cx.try_global::() .map_or(ui_font_size, |adjusted_size| adjusted_size.0) } -pub fn adjust_ui_font_size(cx: &mut WindowContext, f: fn(&mut Pixels)) { +pub fn adjust_ui_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; let mut adjusted_size = cx .try_global::() @@ -513,7 +513,7 @@ pub fn has_adjusted_ui_font_size(cx: &mut AppContext) -> bool { cx.has_global::() } -pub fn reset_ui_font_size(cx: &mut WindowContext) { +pub fn reset_ui_font_size(cx: &mut AppContext) { if cx.has_global::() { cx.remove_global::(); cx.refresh(); From d56e3d99b4a0dddbacd4e762c9be83d2f39f48ba Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 10:17:03 +0200 Subject: [PATCH 122/762] rust: Fix looking up `rust-analyzer` in `$PATH` by default (#17926) This is a follow-up to https://github.com/zed-industries/zed/pull/17885, which is reverted and fixed in this PR. This PR actually enables the behavior by default. Release Notes: - Changed `rust-analyzer` support to lookup `rust-analyzer` binaries by default in `$PATH`. That changes the default value to something users requested. 
--- crates/languages/src/rust.rs | 82 ++++++++++++++++++++---------------- 1 file changed, 46 insertions(+), 36 deletions(-) diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index d102276e440c09..456ea8e4492eda 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -38,45 +38,55 @@ impl LspAdapter for RustLspAdapter { delegate: &dyn LspAdapterDelegate, cx: &AsyncAppContext, ) -> Option { - let configured_binary = cx.update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) - }); - - match configured_binary { - Ok(Some(BinarySettings { - path, + let configured_binary = cx + .update(|cx| { + language_server_settings(delegate, Self::SERVER_NAME, cx) + .and_then(|s| s.binary.clone()) + }) + .ok()?; + + let (path, env, arguments) = match configured_binary { + // If nothing is configured, or path_lookup explicitly enabled, + // we lookup the binary in the path. + None + | Some(BinarySettings { + path: None, + path_lookup: Some(true), + .. + }) + | Some(BinarySettings { + path: None, + path_lookup: None, + .. + }) => { + let path = delegate.which(Self::SERVER_NAME.as_ref()).await; + let env = delegate.shell_env().await; + (path, Some(env), None) + } + // Otherwise, we use the configured binary. + Some(BinarySettings { + path: Some(path), arguments, path_lookup, - })) => { - let (path, env) = match (path, path_lookup) { - (Some(path), lookup) => { - if lookup.is_some() { - log::warn!( - "Both `path` and `path_lookup` are set, ignoring `path_lookup`" - ); - } - (Some(path.into()), None) - } - (None, Some(true)) | (None, None) => { - // Try to lookup rust-analyzer in PATH by default. 
- let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; - let env = delegate.shell_env().await; - (Some(path), Some(env)) - } - (None, Some(false)) => (None, None), - }; - path.map(|path| LanguageServerBinary { - path, - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), - env, - }) + }) => { + if path_lookup.is_some() { + log::warn!("Both `path` and `path_lookup` are set, ignoring `path_lookup`"); + } + (Some(path.into()), None, arguments) } - _ => None, - } + + _ => (None, None, None), + }; + + path.map(|path| LanguageServerBinary { + path, + env, + arguments: arguments + .unwrap_or_default() + .iter() + .map(|arg| arg.into()) + .collect(), + }) } async fn fetch_latest_server_version( From 5f0925fb5d26704e37ad943859af2db3ffa57892 Mon Sep 17 00:00:00 2001 From: ClanEver <562211524@qq.com> Date: Tue, 17 Sep 2024 17:17:29 +0800 Subject: [PATCH 123/762] Add Python venv activation support for Windows and PowerShell (#17839) Release Notes: - Add Python venv activation support for Windows and PowerShell Additional: I discovered a related bug on my Windows system. When first opening the project, it fails to detect the virtual environment folder `.venv`. After expanding the .venv folder in the Project Panel, it then becomes able to detect the virtual environment folder. However, I don't know how to fix it. --- assets/settings/default.json | 2 +- crates/project/src/terminals.rs | 31 ++++++++++++++++++------ crates/terminal/src/terminal_settings.rs | 1 + 3 files changed, 26 insertions(+), 8 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 7c0dc831c31fdc..7f8c823210efa3 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -710,7 +710,7 @@ // to the current working directory. We recommend overriding this // in your project's settings, rather than globally. 
"directories": [".env", "env", ".venv", "venv"], - // Can also be `csh`, `fish`, and `nushell` + // Can also be `csh`, `fish`, `nushell` and `power_shell` "activate_script": "default" } }, diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 6793c028f71795..136842d158604c 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -259,12 +259,16 @@ impl Project { cx: &AppContext, ) -> Option { let venv_settings = settings.detect_venv.as_option()?; + let bin_dir_name = match std::env::consts::OS { + "windows" => "Scripts", + _ => "bin", + }; venv_settings .directories .iter() .map(|virtual_environment_name| abs_path.join(virtual_environment_name)) .find(|venv_path| { - let bin_path = venv_path.join("bin"); + let bin_path = venv_path.join(bin_dir_name); self.find_worktree(&bin_path, cx) .and_then(|(worktree, relative_path)| { worktree.read(cx).entry_for_path(&relative_path) @@ -279,23 +283,36 @@ impl Project { settings: &TerminalSettings, ) -> Option { let venv_settings = settings.detect_venv.as_option()?; + let activate_keyword = match venv_settings.activate_script { + terminal_settings::ActivateScript::Default => match std::env::consts::OS { + "windows" => ".", + _ => "source", + }, + terminal_settings::ActivateScript::Nushell => "overlay use", + terminal_settings::ActivateScript::PowerShell => ".", + _ => "source", + }; let activate_script_name = match venv_settings.activate_script { terminal_settings::ActivateScript::Default => "activate", terminal_settings::ActivateScript::Csh => "activate.csh", terminal_settings::ActivateScript::Fish => "activate.fish", terminal_settings::ActivateScript::Nushell => "activate.nu", + terminal_settings::ActivateScript::PowerShell => "activate.ps1", }; let path = venv_base_directory - .join("bin") + .join(match std::env::consts::OS { + "windows" => "Scripts", + _ => "bin", + }) .join(activate_script_name) .to_string_lossy() .to_string(); let quoted = 
shlex::try_quote(&path).ok()?; - - Some(match venv_settings.activate_script { - terminal_settings::ActivateScript::Nushell => format!("overlay use {}\n", quoted), - _ => format!("source {}\n", quoted), - }) + let line_ending = match std::env::consts::OS { + "windows" => "\r", + _ => "\n", + }; + Some(format!("{} {}{}", activate_keyword, quoted, line_ending)) } fn activate_python_virtual_environment( diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index 6c950526124016..4051caf864d25f 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -87,6 +87,7 @@ pub enum ActivateScript { Csh, Fish, Nushell, + PowerShell, } #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] From c34fc5c6e504bb66bc8973a2c95fc358b613cfea Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 11:47:17 +0200 Subject: [PATCH 124/762] lsp store: Refactor to use shared method to find buffer snapshot (#17929) Came across this code while investigating something else and I think we should use the same method. As far as I know, it does the same thing, except that `buffer_snapshot_for_lsp_version` also cleans up the stored snapshots. 
Release Notes: - N/A --- crates/project/src/lsp_store.rs | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index ee02492dd8cfda..fb3d52eb9ebc62 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -6383,21 +6383,16 @@ impl LspStore { let buffer_id = buffer_to_edit.read(cx).remote_id(); let version = if let Some(buffer_version) = op.text_document.version { - this.buffer_snapshots - .get(&buffer_id) - .and_then(|server_to_snapshots| { - let all_snapshots = server_to_snapshots - .get(&language_server.server_id())?; - all_snapshots - .binary_search_by_key(&buffer_version, |snapshot| { - snapshot.version - }) - .ok() - .and_then(|index| all_snapshots.get(index)) - }) - .map(|lsp_snapshot| lsp_snapshot.snapshot.version()) + this.buffer_snapshot_for_lsp_version( + &buffer_to_edit, + language_server.server_id(), + Some(buffer_version), + cx, + ) + .ok() + .map(|snapshot| snapshot.version) } else { - Some(buffer_to_edit.read(cx).saved_version()) + Some(buffer_to_edit.read(cx).saved_version().clone()) }; let most_recent_edit = version.and_then(|version| { From 2165d52d3e97d0477d9769a2c4138b9e6d4b0531 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 14:33:53 +0200 Subject: [PATCH 125/762] project: Update variable and change comment (#17933) Previous this *was* the `cli_environment`, but now it's the project environment. 
Release Notes: - N/A --- crates/language/src/language_registry.rs | 15 ++++++++------- crates/project/src/lsp_store.rs | 4 ++-- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 4870c4ed539157..918da4873f8667 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -860,7 +860,7 @@ impl LanguageRegistry { adapter: Arc, root_path: Arc, delegate: Arc, - cli_environment: Shared>>>, + project_environment: Shared>>>, cx: &mut AppContext, ) -> Option { let server_id = self.state.write().next_language_server_id(); @@ -881,7 +881,7 @@ impl LanguageRegistry { let task = cx.spawn({ let container_dir = container_dir.clone(); move |mut cx| async move { - let cli_environment = cli_environment.await; + let project_environment = project_environment.await; let binary_result = adapter .clone() @@ -892,15 +892,16 @@ impl LanguageRegistry { let mut binary = binary_result?; - // If this Zed project was opened from the CLI and the language server command itself + // If we do have a project environment (either by spawning a shell in in the project directory + // or by getting it from the CLI) and the language server command itself // doesn't have an environment (which it would have, if it was found in $PATH), then - // we pass along the CLI environment that we inherited. - if binary.env.is_none() && cli_environment.is_some() { + // we use the project environment. 
+ if binary.env.is_none() && project_environment.is_some() { log::info!( - "using CLI environment for language server {:?}, id: {server_id}", + "using project environment for language server {:?}, id: {server_id}", adapter.name.0 ); - binary.env = cli_environment.clone(); + binary.env = project_environment.clone(); } let options = adapter diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index fb3d52eb9ebc62..e1e6001d24b96b 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4646,7 +4646,7 @@ impl LspStore { let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); let lsp_adapter_delegate = ProjectLspAdapterDelegate::for_local(self, worktree_handle, cx); - let cli_environment = local.environment.update(cx, |environment, cx| { + let project_environment = local.environment.update(cx, |environment, cx| { environment.get_environment(Some(worktree_id), Some(worktree_path.clone()), cx) }); @@ -4656,7 +4656,7 @@ impl LspStore { adapter.clone(), Arc::clone(&worktree_path), lsp_adapter_delegate.clone(), - cli_environment, + project_environment, cx, ) { Some(pending_server) => pending_server, From 103f757c114c2712df5f98f05bb64ff1ff9ef2d0 Mon Sep 17 00:00:00 2001 From: Daste Date: Tue, 17 Sep 2024 14:48:05 +0200 Subject: [PATCH 126/762] tab_switcher: Add file and project search icons (#17115) I found tab switcher file icons to be missing. They were mentioned in the [initial tab switcher issue](https://github.com/zed-industries/zed/issues/7653), but left to be added later (mentioned in https://github.com/zed-industries/zed/pull/7987). I also noticed that the project search icon went missing, but I'm not sure if that's intentional. These changes re-introduce it, as it's provided by the generic `tab_icon()` function. 
There's a small difference between the terminal item and everything else, because terminal's `tab_content` returns a slightly different layout, which adds a little more space between the icon and text. I'll look into resolving this withouth changing too much stuff around in the terminal crate. If you have any ideas on how to do this well, please comment. The new `tab_switcher` config section only has a single boolean option - `show_icons`. It toggles between icons and not icons, but doesn't disable the terminal icon. Implementing this would probably also require some refactoring in terminal's `tab_content` function. Release Notes: - Added file icons to the tab switcher Screenshot: ![image](https://github.com/user-attachments/assets/17f3f4a3-1f95-4830-aef1-cda280726385) --- crates/diagnostics/src/diagnostics.rs | 59 ++++++++++++----------- crates/tab_switcher/src/tab_switcher.rs | 3 ++ crates/terminal_view/src/terminal_view.rs | 2 +- 3 files changed, 36 insertions(+), 28 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index ddf39e0bfa2424..eec4f735ec38e5 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -645,37 +645,42 @@ impl Item for ProjectDiagnosticsEditor { } fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement { - if self.summary.error_count == 0 && self.summary.warning_count == 0 { - Label::new("No problems") - .color(params.text_color()) - .into_any_element() - } else { - h_flex() - .gap_1() - .when(self.summary.error_count > 0, |then| { + h_flex() + .gap_1() + .when( + self.summary.error_count == 0 && self.summary.warning_count == 0, + |then| { then.child( h_flex() .gap_1() - .child(Icon::new(IconName::XCircle).color(Color::Error)) - .child( - Label::new(self.summary.error_count.to_string()) - .color(params.text_color()), - ), + .child(Icon::new(IconName::Check).color(Color::Success)) + .child(Label::new("No 
problems").color(params.text_color())), ) - }) - .when(self.summary.warning_count > 0, |then| { - then.child( - h_flex() - .gap_1() - .child(Icon::new(IconName::Warning).color(Color::Warning)) - .child( - Label::new(self.summary.warning_count.to_string()) - .color(params.text_color()), - ), - ) - }) - .into_any_element() - } + }, + ) + .when(self.summary.error_count > 0, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child( + Label::new(self.summary.error_count.to_string()) + .color(params.text_color()), + ), + ) + }) + .when(self.summary.warning_count > 0, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::Warning).color(Color::Warning)) + .child( + Label::new(self.summary.warning_count.to_string()) + .color(params.text_color()), + ), + ) + }) + .into_any_element() } fn telemetry_event_text(&self) -> Option<&'static str> { diff --git a/crates/tab_switcher/src/tab_switcher.rs b/crates/tab_switcher/src/tab_switcher.rs index e8966ac5b91769..0b3eaf9e8a5d2a 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -378,6 +378,9 @@ impl PickerDelegate for TabSwitcherDelegate { .inset(true) .selected(selected) .child(h_flex().w_full().child(label)) + .when_some(tab_match.item.tab_icon(cx), |el, icon| { + el.start_slot(div().child(icon)) + }) .map(|el| { if self.selected_index == ix { el.end_slot::(close_button) diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 1869e33383d177..f19bfa70101ce3 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1008,7 +1008,7 @@ impl Item for TerminalView { }; h_flex() - .gap_2() + .gap_1() .group("term-tab-icon") .child( h_flex() From 4139a9a75862cbb8922a7f74537e09aad5ca1e62 Mon Sep 17 00:00:00 2001 From: iugo Date: Tue, 17 Sep 2024 21:23:48 +0800 Subject: [PATCH 127/762] docs: Document usage of deno fmt 
(#17918) Clarify in the settings description that the default formatter leverages `deno fmt`. This makes it clearer for users what to expect and how formatting is handled out of the box. Co-authored-by: Peter Tripp --- docs/src/languages/deno.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/src/languages/deno.md b/docs/src/languages/deno.md index 5b92acfeeb049b..9ba1526ab0fe97 100644 --- a/docs/src/languages/deno.md +++ b/docs/src/languages/deno.md @@ -26,7 +26,8 @@ To use the Deno Language Server with TypeScript and TSX files, you will likely w "!typescript-language-server", "!vtsls", "!eslint" - ] + ], + "formatter": "language_server" }, "TSX": { "language_servers": [ @@ -34,7 +35,8 @@ To use the Deno Language Server with TypeScript and TSX files, you will likely w "!typescript-language-server", "!vtsls", "!eslint" - ] + ], + "formatter": "language_server" } } } From f1d21362fa2b40406c2a71da7b4353d622095ea7 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 15:37:43 +0200 Subject: [PATCH 128/762] editor: Fix cursor_shape regression by not setting it to "bar" (#17934) This fixes the regression introduced here: https://github.com/zed-industries/zed/pull/17572#issuecomment-2355632615 Essentially: instead of always setting the value when saving settings, we don't set it by default, but fall back to the default value if it's not set. That fixes Vim mode's cursor being overwritten when settings change. Release Notes: - N/A --- assets/settings/default.json | 4 ++-- crates/editor/src/editor.rs | 8 ++++++-- crates/editor/src/editor_settings.rs | 4 ++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 7f8c823210efa3..35b2ca20f2a44b 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -121,8 +121,8 @@ // 4. 
A box drawn around the following character // "hollow" // - // Default: bar - "cursor_shape": "bar", + // Default: not set, defaults to "bar" + "cursor_shape": null, // How to highlight the current line in the editor. // // 1. Don't highlight the current line: diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index e583f2d9632db8..c3c54e49513d7a 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1904,7 +1904,9 @@ impl Editor { linked_editing_range_task: Default::default(), pending_rename: Default::default(), searchable: true, - cursor_shape: EditorSettings::get_global(cx).cursor_shape, + cursor_shape: EditorSettings::get_global(cx) + .cursor_shape + .unwrap_or_default(), current_line_highlight: None, autoindent_mode: Some(AutoindentMode::EachLine), collapse_matches: false, @@ -11820,7 +11822,9 @@ impl Editor { cx, ); let editor_settings = EditorSettings::get_global(cx); - self.cursor_shape = editor_settings.cursor_shape; + if let Some(cursor_shape) = editor_settings.cursor_shape { + self.cursor_shape = cursor_shape; + } self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index c6c5f111cb384f..d651e76c2c2e7d 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -7,7 +7,7 @@ use settings::{Settings, SettingsSources}; #[derive(Deserialize, Clone)] pub struct EditorSettings { pub cursor_blink: bool, - pub cursor_shape: CursorShape, + pub cursor_shape: Option, pub current_line_highlight: CurrentLineHighlight, pub hover_popover_enabled: bool, pub show_completions_on_input: bool, @@ -182,7 +182,7 @@ pub struct EditorSettingsContent { /// Cursor shape for the default editor. /// Can be "bar", "block", "underscore", or "hollow". 
/// - /// Default: bar + /// Default: None pub cursor_shape: Option, /// How to highlight the current line in the editor. /// From c48584fb7964d998b7bfe8f426f60f909db496d4 Mon Sep 17 00:00:00 2001 From: Kevin Wang Date: Tue, 17 Sep 2024 07:04:33 -0700 Subject: [PATCH 129/762] supermaven: Fix incorrect offset calculation (#17925) Fixes a bug in https://github.com/zed-industries/zed/pull/17578 when computing the offset. Specifically, `offset.add_assign()` should be incremented on every loop match instead of only when the completion text is found. Before: ![image](https://github.com/user-attachments/assets/cc09dbf9-03e8-4453-a1c7-11f838c1d959) After: ![image](https://github.com/user-attachments/assets/f3513769-d9e1-451f-97dc-b9ad3a57ce3a) Release Notes: - Fixed a wrong offset calculation in the Supermaven inline completion provider. --- crates/supermaven/src/supermaven_completion_provider.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/supermaven/src/supermaven_completion_provider.rs b/crates/supermaven/src/supermaven_completion_provider.rs index 41197717144265..261ce372d9f717 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -77,10 +77,10 @@ fn completion_state_from_diff( snapshot.anchor_after(offset), completion_text[i..i + k].into(), )); - offset.add_assign(j); } i += k + 1; j += 1; + offset.add_assign(1); } None => { // there are no more matching completions, so drop the remaining From a20c0eb62648adb99834c37cfba0961daf771d36 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 10:06:33 -0400 Subject: [PATCH 130/762] Improve error message when docs need fixing with Prettier (#17907) --- .github/workflows/docs.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index be0f3c5a823524..13fe0411a022ac 100644 --- a/.github/workflows/docs.yml +++ 
b/.github/workflows/docs.yml @@ -20,5 +20,11 @@ jobs: with: version: 9 - - run: pnpm dlx prettier . --check + - run: | + pnpm dlx prettier . --check || { + echo "To fix, run from the root of the zed repo:" + echo " cd docs && pnpm dlx prettier . --write && cd .." + false + } + working-directory: ./docs From 54b8232be2013e955e7b7d2a61cc82dc18cea5fa Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 17 Sep 2024 08:47:08 -0600 Subject: [PATCH 131/762] Introduce a new `/delta` command (#17903) Release Notes: - Added a new `/delta` command to re-insert changed files that were previously included in a context. --------- Co-authored-by: Roy --- Cargo.lock | 1 + crates/assistant/src/assistant.rs | 7 +- crates/assistant/src/assistant_panel.rs | 17 +- crates/assistant/src/context.rs | 15 +- crates/assistant/src/context/context_tests.rs | 5 +- crates/assistant/src/slash_command.rs | 1 + .../src/slash_command/auto_command.rs | 4 +- .../slash_command/context_server_command.rs | 6 +- .../src/slash_command/default_command.rs | 5 +- .../src/slash_command/delta_command.rs | 109 ++++++++ .../src/slash_command/diagnostics_command.rs | 234 +++++----------- .../src/slash_command/docs_command.rs | 5 +- .../src/slash_command/fetch_command.rs | 5 +- .../src/slash_command/file_command.rs | 260 ++++++++---------- .../src/slash_command/now_command.rs | 5 +- .../src/slash_command/project_command.rs | 5 +- .../src/slash_command/prompt_command.rs | 5 +- .../src/slash_command/search_command.rs | 3 + .../src/slash_command/symbols_command.rs | 5 +- .../src/slash_command/tab_command.rs | 47 +--- .../src/slash_command/terminal_command.rs | 5 +- .../src/slash_command/workflow_command.rs | 5 +- crates/assistant_slash_command/Cargo.toml | 1 + .../src/assistant_slash_command.rs | 13 +- .../extension/src/extension_slash_command.rs | 5 +- crates/proto/proto/zed.proto | 1 + 26 files changed, 408 insertions(+), 366 deletions(-) create mode 100644 crates/assistant/src/slash_command/delta_command.rs 
diff --git a/Cargo.lock b/Cargo.lock index b31beee09c62d7..6d0f7f54a01770 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -455,6 +455,7 @@ dependencies = [ "language", "parking_lot", "serde", + "serde_json", "workspace", ] diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index 7a73c188ec9dc1..af7f03ebb35264 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -41,9 +41,9 @@ use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; use slash_command::{ - auto_command, context_server_command, default_command, diagnostics_command, docs_command, - fetch_command, file_command, now_command, project_command, prompt_command, search_command, - symbols_command, tab_command, terminal_command, workflow_command, + auto_command, context_server_command, default_command, delta_command, diagnostics_command, + docs_command, fetch_command, file_command, now_command, project_command, prompt_command, + search_command, symbols_command, tab_command, terminal_command, workflow_command, }; use std::path::PathBuf; use std::sync::Arc; @@ -367,6 +367,7 @@ fn register_slash_commands(prompt_builder: Option>, cx: &mut let slash_command_registry = SlashCommandRegistry::global(cx); slash_command_registry.register_command(file_command::FileSlashCommand, true); + slash_command_registry.register_command(delta_command::DeltaSlashCommand, true); slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true); slash_command_registry.register_command(tab_command::TabSlashCommand, true); slash_command_registry.register_command(project_command::ProjectSlashCommand, true); diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 59f5e81d054c3f..52838b5c77b190 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -1906,7 +1906,22 @@ 
impl ContextEditor { cx: &mut ViewContext, ) { if let Some(command) = SlashCommandRegistry::global(cx).command(name) { - let output = command.run(arguments, workspace, self.lsp_adapter_delegate.clone(), cx); + let context = self.context.read(cx); + let sections = context + .slash_command_output_sections() + .into_iter() + .filter(|section| section.is_valid(context.buffer().read(cx))) + .cloned() + .collect::>(); + let snapshot = context.buffer().read(cx).snapshot(); + let output = command.run( + arguments, + §ions, + snapshot, + workspace, + self.lsp_adapter_delegate.clone(), + cx, + ); self.context.update(cx, |context, cx| { context.insert_command_output( command_range, diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 38ccddb962d336..d55b1aee088642 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -48,7 +48,7 @@ use std::{ }; use telemetry_events::AssistantKind; use text::BufferSnapshot; -use util::{post_inc, TryFutureExt}; +use util::{post_inc, ResultExt, TryFutureExt}; use uuid::Uuid; #[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)] @@ -162,6 +162,9 @@ impl ContextOperation { )?, icon: section.icon_name.parse()?, label: section.label.into(), + metadata: section + .metadata + .and_then(|metadata| serde_json::from_str(&metadata).log_err()), }) }) .collect::>>()?, @@ -242,6 +245,9 @@ impl ContextOperation { )), icon_name: icon_name.to_string(), label: section.label.to_string(), + metadata: section.metadata.as_ref().and_then(|metadata| { + serde_json::to_string(metadata).log_err() + }), } }) .collect(), @@ -635,12 +641,13 @@ impl Context { .slash_command_output_sections .iter() .filter_map(|section| { - let range = section.range.to_offset(buffer); - if section.range.start.is_valid(buffer) && !range.is_empty() { + if section.is_valid(buffer) { + let range = section.range.to_offset(buffer); Some(assistant_slash_command::SlashCommandOutputSection { range, icon: 
section.icon, label: section.label.clone(), + metadata: section.metadata.clone(), }) } else { None @@ -1825,6 +1832,7 @@ impl Context { ..buffer.anchor_before(start + section.range.end), icon: section.icon, label: section.label, + metadata: section.metadata, }) .collect::>(); sections.sort_by(|a, b| a.range.cmp(&b.range, buffer)); @@ -2977,6 +2985,7 @@ impl SavedContext { ..buffer.anchor_before(section.range.end), icon: section.icon, label: section.label, + metadata: section.metadata, } }) .collect(), diff --git a/crates/assistant/src/context/context_tests.rs b/crates/assistant/src/context/context_tests.rs index c851ca7438f5b0..842ac050786344 100644 --- a/crates/assistant/src/context/context_tests.rs +++ b/crates/assistant/src/context/context_tests.rs @@ -12,7 +12,7 @@ use assistant_slash_command::{ use collections::HashSet; use fs::FakeFs; use gpui::{AppContext, Model, SharedString, Task, TestAppContext, WeakView}; -use language::{Buffer, LanguageRegistry, LspAdapterDelegate}; +use language::{Buffer, BufferSnapshot, LanguageRegistry, LspAdapterDelegate}; use language_model::{LanguageModelCacheConfiguration, LanguageModelRegistry, Role}; use parking_lot::Mutex; use project::Project; @@ -1089,6 +1089,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std range: section_start..section_end, icon: ui::IconName::Ai, label: "section".into(), + metadata: None, }); } @@ -1425,6 +1426,8 @@ impl SlashCommand for FakeSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, _cx: &mut WindowContext, diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index 387e8231e47842..cf957a15c67d9d 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -22,6 +22,7 @@ use workspace::Workspace; pub mod auto_command; pub mod 
context_server_command; pub mod default_command; +pub mod delta_command; pub mod diagnostics_command; pub mod docs_command; pub mod fetch_command; diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs index cedfc63702b9aa..e1f20c311bd36e 100644 --- a/crates/assistant/src/slash_command/auto_command.rs +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -1,7 +1,7 @@ use super::create_label_for_command; use super::{SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Result}; -use assistant_slash_command::ArgumentCompletion; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use feature_flags::FeatureFlag; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext, Task, WeakView}; @@ -87,6 +87,8 @@ impl SlashCommand for AutoCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: language::BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, diff --git a/crates/assistant/src/slash_command/context_server_command.rs b/crates/assistant/src/slash_command/context_server_command.rs index 8ae9430a993e16..6b1ae39186d28f 100644 --- a/crates/assistant/src/slash_command/context_server_command.rs +++ b/crates/assistant/src/slash_command/context_server_command.rs @@ -9,7 +9,7 @@ use context_servers::{ protocol::PromptInfo, }; use gpui::{Task, WeakView, WindowContext}; -use language::{CodeLabel, LspAdapterDelegate}; +use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate}; use std::sync::atomic::AtomicBool; use std::sync::Arc; use text::LineEnding; @@ -96,7 +96,6 @@ impl SlashCommand for ContextServerSlashCommand { replace_previous_arguments: false, }) .collect(); - Ok(completions) }) } else { @@ -107,6 +106,8 @@ impl SlashCommand for ContextServerSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: 
&[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -141,6 +142,7 @@ impl SlashCommand for ContextServerSlashCommand { .description .unwrap_or(format!("Result from {}", prompt_name)), ), + metadata: None, }], text: prompt, run_commands_in_text: false, diff --git a/crates/assistant/src/slash_command/default_command.rs b/crates/assistant/src/slash_command/default_command.rs index 18db87b3228b9c..4199840300a24c 100644 --- a/crates/assistant/src/slash_command/default_command.rs +++ b/crates/assistant/src/slash_command/default_command.rs @@ -3,7 +3,7 @@ use crate::prompt_library::PromptStore; use anyhow::{anyhow, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use std::{ fmt::Write, sync::{atomic::AtomicBool, Arc}, @@ -43,6 +43,8 @@ impl SlashCommand for DefaultSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -70,6 +72,7 @@ impl SlashCommand for DefaultSlashCommand { range: 0..text.len(), icon: IconName::Library, label: "Default".into(), + metadata: None, }], text, run_commands_in_text: true, diff --git a/crates/assistant/src/slash_command/delta_command.rs b/crates/assistant/src/slash_command/delta_command.rs new file mode 100644 index 00000000000000..6a66ad3f09aa9a --- /dev/null +++ b/crates/assistant/src/slash_command/delta_command.rs @@ -0,0 +1,109 @@ +use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand}; +use anyhow::Result; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, +}; +use collections::HashSet; +use futures::future; +use gpui::{Task, WeakView, 
WindowContext}; +use language::{BufferSnapshot, LspAdapterDelegate}; +use std::sync::{atomic::AtomicBool, Arc}; +use text::OffsetRangeExt; +use workspace::Workspace; + +pub(crate) struct DeltaSlashCommand; + +impl SlashCommand for DeltaSlashCommand { + fn name(&self) -> String { + "delta".into() + } + + fn description(&self) -> String { + "re-insert changed files".into() + } + + fn menu_text(&self) -> String { + "Re-insert Changed Files".into() + } + + fn requires_argument(&self) -> bool { + false + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancellation_flag: Arc, + _workspace: Option>, + _cx: &mut WindowContext, + ) -> Task>> { + unimplemented!() + } + + fn run( + self: Arc, + _arguments: &[String], + context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: BufferSnapshot, + workspace: WeakView, + delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let mut paths = HashSet::default(); + let mut file_command_old_outputs = Vec::new(); + let mut file_command_new_outputs = Vec::new(); + for section in context_slash_command_output_sections.iter().rev() { + if let Some(metadata) = section + .metadata + .as_ref() + .and_then(|value| serde_json::from_value::(value.clone()).ok()) + { + if paths.insert(metadata.path.clone()) { + file_command_old_outputs.push( + context_buffer + .as_rope() + .slice(section.range.to_offset(&context_buffer)), + ); + file_command_new_outputs.push(Arc::new(FileSlashCommand).run( + &[metadata.path.clone()], + context_slash_command_output_sections, + context_buffer.clone(), + workspace.clone(), + delegate.clone(), + cx, + )); + } + } + } + + cx.background_executor().spawn(async move { + let mut output = SlashCommandOutput::default(); + + let file_command_new_outputs = future::join_all(file_command_new_outputs).await; + for (old_text, new_output) in file_command_old_outputs + .into_iter() + .zip(file_command_new_outputs) + { + if let Ok(new_output) = new_output { + if let 
Some(file_command_range) = new_output.sections.first() { + let new_text = &new_output.text[file_command_range.range.clone()]; + if old_text.chars().ne(new_text.chars()) { + output.sections.extend(new_output.sections.into_iter().map( + |section| SlashCommandOutputSection { + range: output.text.len() + section.range.start + ..output.text.len() + section.range.end, + icon: section.icon, + label: section.label, + metadata: section.metadata, + }, + )); + output.text.push_str(&new_output.text); + } + } + } + } + + Ok(output) + }) + } +} diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 21058306519787..3f79c016750318 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -9,10 +9,9 @@ use language::{ }; use project::{DiagnosticSummary, PathMatchCandidateSet, Project}; use rope::Point; -use std::fmt::Write; -use std::path::{Path, PathBuf}; use std::{ - ops::Range, + fmt::Write, + path::{Path, PathBuf}, sync::{atomic::AtomicBool, Arc}, }; use ui::prelude::*; @@ -163,6 +162,8 @@ impl SlashCommand for DiagnosticsSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -175,68 +176,7 @@ impl SlashCommand for DiagnosticsSlashCommand { let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx); - cx.spawn(move |_| async move { - let Some((text, sections)) = task.await? 
else { - return Ok(SlashCommandOutput { - sections: vec![SlashCommandOutputSection { - range: 0..1, - icon: IconName::Library, - label: "No Diagnostics".into(), - }], - text: "\n".to_string(), - run_commands_in_text: true, - }); - }; - - let sections = sections - .into_iter() - .map(|(range, placeholder_type)| SlashCommandOutputSection { - range, - icon: match placeholder_type { - PlaceholderType::Root(_, _) => IconName::Warning, - PlaceholderType::File(_) => IconName::File, - PlaceholderType::Diagnostic(DiagnosticType::Error, _) => IconName::XCircle, - PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => { - IconName::Warning - } - }, - label: match placeholder_type { - PlaceholderType::Root(summary, source) => { - let mut label = String::new(); - label.push_str("Diagnostics"); - if let Some(source) = source { - write!(label, " ({})", source).unwrap(); - } - - if summary.error_count > 0 || summary.warning_count > 0 { - label.push(':'); - - if summary.error_count > 0 { - write!(label, " {} errors", summary.error_count).unwrap(); - if summary.warning_count > 0 { - label.push_str(","); - } - } - - if summary.warning_count > 0 { - write!(label, " {} warnings", summary.warning_count).unwrap(); - } - } - - label.into() - } - PlaceholderType::File(file_path) => file_path.into(), - PlaceholderType::Diagnostic(_, message) => message.into(), - }, - }) - .collect(); - - Ok(SlashCommandOutput { - text, - sections, - run_commands_in_text: false, - }) - }) + cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) }) } } @@ -277,7 +217,7 @@ fn collect_diagnostics( project: Model, options: Options, cx: &mut AppContext, -) -> Task, PlaceholderType)>)>>> { +) -> Task>> { let error_source = if let Some(path_matcher) = &options.path_matcher { debug_assert_eq!(path_matcher.sources().len(), 1); Some(path_matcher.sources().first().cloned().unwrap_or_default()) @@ -318,13 +258,13 @@ fn collect_diagnostics( .collect(); cx.spawn(|mut cx| async move { 
- let mut text = String::new(); + let mut output = SlashCommandOutput::default(); + if let Some(error_source) = error_source.as_ref() { - writeln!(text, "diagnostics: {}", error_source).unwrap(); + writeln!(output.text, "diagnostics: {}", error_source).unwrap(); } else { - writeln!(text, "diagnostics").unwrap(); + writeln!(output.text, "diagnostics").unwrap(); } - let mut sections: Vec<(Range, PlaceholderType)> = Vec::new(); let mut project_summary = DiagnosticSummary::default(); for (project_path, path, summary) in diagnostic_summaries { @@ -341,10 +281,10 @@ fn collect_diagnostics( continue; } - let last_end = text.len(); + let last_end = output.text.len(); let file_path = path.to_string_lossy().to_string(); if !glob_is_exact_file_match { - writeln!(&mut text, "{file_path}").unwrap(); + writeln!(&mut output.text, "{file_path}").unwrap(); } if let Some(buffer) = project_handle @@ -352,75 +292,73 @@ fn collect_diagnostics( .await .log_err() { - collect_buffer_diagnostics( - &mut text, - &mut sections, - cx.read_model(&buffer, |buffer, _| buffer.snapshot())?, - options.include_warnings, - ); + let snapshot = cx.read_model(&buffer, |buffer, _| buffer.snapshot())?; + collect_buffer_diagnostics(&mut output, &snapshot, options.include_warnings); } if !glob_is_exact_file_match { - sections.push(( - last_end..text.len().saturating_sub(1), - PlaceholderType::File(file_path), - )) + output.sections.push(SlashCommandOutputSection { + range: last_end..output.text.len().saturating_sub(1), + icon: IconName::File, + label: file_path.into(), + metadata: None, + }); } } // No diagnostics found - if sections.is_empty() { + if output.sections.is_empty() { return Ok(None); } - sections.push(( - 0..text.len(), - PlaceholderType::Root(project_summary, error_source), - )); - Ok(Some((text, sections))) - }) -} - -pub fn buffer_has_error_diagnostics(snapshot: &BufferSnapshot) -> bool { - for (_, group) in snapshot.diagnostic_groups(None) { - let entry = &group.entries[group.primary_ix]; - 
if entry.diagnostic.severity == DiagnosticSeverity::ERROR { - return true; + let mut label = String::new(); + label.push_str("Diagnostics"); + if let Some(source) = error_source { + write!(label, " ({})", source).unwrap(); } - } - false -} -pub fn write_single_file_diagnostics( - output: &mut String, - path: Option<&Path>, - snapshot: &BufferSnapshot, -) -> bool { - if let Some(path) = path { - if buffer_has_error_diagnostics(&snapshot) { - output.push_str("/diagnostics "); - output.push_str(&path.to_string_lossy()); - return true; + if project_summary.error_count > 0 || project_summary.warning_count > 0 { + label.push(':'); + + if project_summary.error_count > 0 { + write!(label, " {} errors", project_summary.error_count).unwrap(); + if project_summary.warning_count > 0 { + label.push_str(","); + } + } + + if project_summary.warning_count > 0 { + write!(label, " {} warnings", project_summary.warning_count).unwrap(); + } } - } - false + + output.sections.insert( + 0, + SlashCommandOutputSection { + range: 0..output.text.len(), + icon: IconName::Warning, + label: label.into(), + metadata: None, + }, + ); + + Ok(Some(output)) + }) } -fn collect_buffer_diagnostics( - text: &mut String, - sections: &mut Vec<(Range, PlaceholderType)>, - snapshot: BufferSnapshot, +pub fn collect_buffer_diagnostics( + output: &mut SlashCommandOutput, + snapshot: &BufferSnapshot, include_warnings: bool, ) { for (_, group) in snapshot.diagnostic_groups(None) { let entry = &group.entries[group.primary_ix]; - collect_diagnostic(text, sections, entry, &snapshot, include_warnings) + collect_diagnostic(output, entry, &snapshot, include_warnings) } } fn collect_diagnostic( - text: &mut String, - sections: &mut Vec<(Range, PlaceholderType)>, + output: &mut SlashCommandOutput, entry: &DiagnosticEntry, snapshot: &BufferSnapshot, include_warnings: bool, @@ -428,17 +366,17 @@ fn collect_diagnostic( const EXCERPT_EXPANSION_SIZE: u32 = 2; const MAX_MESSAGE_LENGTH: usize = 2000; - let ty = match 
entry.diagnostic.severity { + let (ty, icon) = match entry.diagnostic.severity { DiagnosticSeverity::WARNING => { if !include_warnings { return; } - DiagnosticType::Warning + ("warning", IconName::Warning) } - DiagnosticSeverity::ERROR => DiagnosticType::Error, + DiagnosticSeverity::ERROR => ("error", IconName::XCircle), _ => return, }; - let prev_len = text.len(); + let prev_len = output.text.len(); let range = entry.range.to_point(snapshot); let diagnostic_row_number = range.start.row + 1; @@ -448,11 +386,11 @@ fn collect_diagnostic( let excerpt_range = Point::new(start_row, 0).to_offset(&snapshot)..Point::new(end_row, 0).to_offset(&snapshot); - text.push_str("```"); + output.text.push_str("```"); if let Some(language_name) = snapshot.language().map(|l| l.code_fence_block_name()) { - text.push_str(&language_name); + output.text.push_str(&language_name); } - text.push('\n'); + output.text.push('\n'); let mut buffer_text = String::new(); for chunk in snapshot.text_for_range(excerpt_range) { @@ -461,46 +399,26 @@ fn collect_diagnostic( for (i, line) in buffer_text.lines().enumerate() { let line_number = start_row + i as u32 + 1; - writeln!(text, "{}", line).unwrap(); + writeln!(output.text, "{}", line).unwrap(); if line_number == diagnostic_row_number { - text.push_str("//"); - let prev_len = text.len(); - write!(text, " {}: ", ty.as_str()).unwrap(); - let padding = text.len() - prev_len; + output.text.push_str("//"); + let prev_len = output.text.len(); + write!(output.text, " {}: ", ty).unwrap(); + let padding = output.text.len() - prev_len; let message = util::truncate(&entry.diagnostic.message, MAX_MESSAGE_LENGTH) .replace('\n', format!("\n//{:padding$}", "").as_str()); - writeln!(text, "{message}").unwrap(); + writeln!(output.text, "{message}").unwrap(); } } - writeln!(text, "```").unwrap(); - sections.push(( - prev_len..text.len().saturating_sub(1), - PlaceholderType::Diagnostic(ty, entry.diagnostic.message.clone()), - )) -} - -#[derive(Clone)] -pub enum 
PlaceholderType { - Root(DiagnosticSummary, Option), - File(String), - Diagnostic(DiagnosticType, String), -} - -#[derive(Copy, Clone)] -pub enum DiagnosticType { - Warning, - Error, -} - -impl DiagnosticType { - pub fn as_str(&self) -> &'static str { - match self { - DiagnosticType::Warning => "warning", - DiagnosticType::Error => "error", - } - } + writeln!(output.text, "```").unwrap(); + output.sections.push(SlashCommandOutputSection { + range: prev_len..output.text.len().saturating_sub(1), + icon, + label: entry.diagnostic.message.clone().into(), + metadata: None, + }); } diff --git a/crates/assistant/src/slash_command/docs_command.rs b/crates/assistant/src/slash_command/docs_command.rs index e114cfeab74b31..399ede9d999549 100644 --- a/crates/assistant/src/slash_command/docs_command.rs +++ b/crates/assistant/src/slash_command/docs_command.rs @@ -12,7 +12,7 @@ use indexed_docs::{ DocsDotRsProvider, IndexedDocsRegistry, IndexedDocsStore, LocalRustdocProvider, PackageName, ProviderId, }; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use project::{Project, ProjectPath}; use ui::prelude::*; use util::{maybe, ResultExt}; @@ -269,6 +269,8 @@ impl SlashCommand for DocsSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -349,6 +351,7 @@ impl SlashCommand for DocsSlashCommand { range, icon: IconName::FileDoc, label: format!("docs ({provider}): {key}",).into(), + metadata: None, }) .collect(), run_commands_in_text: false, diff --git a/crates/assistant/src/slash_command/fetch_command.rs b/crates/assistant/src/slash_command/fetch_command.rs index 8ecb6de7590663..23d3c884a8ec28 100644 --- a/crates/assistant/src/slash_command/fetch_command.rs +++ b/crates/assistant/src/slash_command/fetch_command.rs @@ -11,7 +11,7 @@ use futures::AsyncReadExt; use 
gpui::{Task, WeakView}; use html_to_markdown::{convert_html_to_markdown, markdown, TagHandler}; use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use workspace::Workspace; @@ -128,6 +128,8 @@ impl SlashCommand for FetchSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -161,6 +163,7 @@ impl SlashCommand for FetchSlashCommand { range, icon: IconName::AtSign, label: format!("fetch {}", url).into(), + metadata: None, }], run_commands_in_text: false, }) diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index e5d8f1b2d6bbb2..0df8b5d4e0b83c 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -1,10 +1,11 @@ -use super::{diagnostics_command::write_single_file_diagnostics, SlashCommand, SlashCommandOutput}; +use super::{diagnostics_command::collect_buffer_diagnostics, SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{AfterCompletion, ArgumentCompletion, SlashCommandOutputSection}; use fuzzy::PathMatch; use gpui::{AppContext, Model, Task, View, WeakView}; use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate}; use project::{PathMatchCandidateSet, Project}; +use serde::{Deserialize, Serialize}; use std::{ fmt::Write, ops::Range, @@ -175,6 +176,8 @@ impl SlashCommand for FileSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -187,54 +190,15 @@ impl SlashCommand for FileSlashCommand 
{ return Task::ready(Err(anyhow!("missing path"))); }; - let task = collect_files(workspace.read(cx).project().clone(), arguments, cx); - - cx.foreground_executor().spawn(async move { - let output = task.await?; - Ok(SlashCommandOutput { - text: output.completion_text, - sections: output - .files - .into_iter() - .map(|file| { - build_entry_output_section( - file.range_in_text, - Some(&file.path), - file.entry_type == EntryType::Directory, - None, - ) - }) - .collect(), - run_commands_in_text: true, - }) - }) + collect_files(workspace.read(cx).project().clone(), arguments, cx) } } -#[derive(Clone, Copy, PartialEq, Debug)] -enum EntryType { - File, - Directory, -} - -#[derive(Clone, PartialEq, Debug)] -struct FileCommandOutput { - completion_text: String, - files: Vec, -} - -#[derive(Clone, PartialEq, Debug)] -struct OutputFile { - range_in_text: Range, - path: PathBuf, - entry_type: EntryType, -} - fn collect_files( project: Model, glob_inputs: &[String], cx: &mut AppContext, -) -> Task> { +) -> Task> { let Ok(matchers) = glob_inputs .into_iter() .map(|glob_input| { @@ -254,8 +218,7 @@ fn collect_files( .collect::>(); cx.spawn(|mut cx| async move { - let mut text = String::new(); - let mut ranges = Vec::new(); + let mut output = SlashCommandOutput::default(); for snapshot in snapshots { let worktree_id = snapshot.id(); let mut directory_stack: Vec<(Arc, String, usize)> = Vec::new(); @@ -279,11 +242,12 @@ fn collect_files( break; } let (_, entry_name, start) = directory_stack.pop().unwrap(); - ranges.push(OutputFile { - range_in_text: start..text.len().saturating_sub(1), - path: PathBuf::from(entry_name), - entry_type: EntryType::Directory, - }); + output.sections.push(build_entry_output_section( + start..output.text.len().saturating_sub(1), + Some(&PathBuf::from(entry_name)), + true, + None, + )); } let filename = entry @@ -315,21 +279,23 @@ fn collect_files( continue; } let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/"); - let 
entry_start = text.len(); + let entry_start = output.text.len(); if prefix_paths.is_empty() { if is_top_level_directory { - text.push_str(&path_including_worktree_name.to_string_lossy()); + output + .text + .push_str(&path_including_worktree_name.to_string_lossy()); is_top_level_directory = false; } else { - text.push_str(&filename); + output.text.push_str(&filename); } directory_stack.push((entry.path.clone(), filename, entry_start)); } else { let entry_name = format!("{}/{}", prefix_paths, &filename); - text.push_str(&entry_name); + output.text.push_str(&entry_name); directory_stack.push((entry.path.clone(), entry_name, entry_start)); } - text.push('\n'); + output.text.push('\n'); } else if entry.is_file() { let Some(open_buffer_task) = project_handle .update(&mut cx, |project, cx| { @@ -340,28 +306,13 @@ fn collect_files( continue; }; if let Some(buffer) = open_buffer_task.await.log_err() { - let buffer_snapshot = - cx.read_model(&buffer, |buffer, _| buffer.snapshot())?; - let prev_len = text.len(); - collect_file_content( - &mut text, - &buffer_snapshot, - path_including_worktree_name.to_string_lossy().to_string(), - ); - text.push('\n'); - if !write_single_file_diagnostics( - &mut text, + let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; + append_buffer_to_output( + &snapshot, Some(&path_including_worktree_name), - &buffer_snapshot, - ) { - text.pop(); - } - ranges.push(OutputFile { - range_in_text: prev_len..text.len(), - path: path_including_worktree_name, - entry_type: EntryType::File, - }); - text.push('\n'); + &mut output, + ) + .log_err(); } } } @@ -371,42 +322,26 @@ fn collect_files( let mut root_path = PathBuf::new(); root_path.push(snapshot.root_name()); root_path.push(&dir); - ranges.push(OutputFile { - range_in_text: start..text.len(), - path: root_path, - entry_type: EntryType::Directory, - }); + output.sections.push(build_entry_output_section( + start..output.text.len(), + Some(&root_path), + true, + None, + )); } else { - 
ranges.push(OutputFile { - range_in_text: start..text.len(), - path: PathBuf::from(entry.as_str()), - entry_type: EntryType::Directory, - }); + output.sections.push(build_entry_output_section( + start..output.text.len(), + Some(&PathBuf::from(entry.as_str())), + true, + None, + )); } } } - Ok(FileCommandOutput { - completion_text: text, - files: ranges, - }) + Ok(output) }) } -fn collect_file_content(buffer: &mut String, snapshot: &BufferSnapshot, filename: String) { - let mut content = snapshot.text(); - LineEnding::normalize(&mut content); - buffer.reserve(filename.len() + content.len() + 9); - buffer.push_str(&codeblock_fence_for_path( - Some(&PathBuf::from(filename)), - None, - )); - buffer.push_str(&content); - if !buffer.ends_with('\n') { - buffer.push('\n'); - } - buffer.push_str("```"); -} - pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option>) -> String { let mut text = String::new(); write!(text, "```").unwrap(); @@ -429,6 +364,11 @@ pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option, path: Option<&Path>, @@ -454,6 +394,16 @@ pub fn build_entry_output_section( range, icon, label: label.into(), + metadata: if is_directory { + None + } else { + path.and_then(|path| { + serde_json::to_value(FileCommandMetadata { + path: path.to_string_lossy().to_string(), + }) + .ok() + }) + }, } } @@ -539,6 +489,36 @@ mod custom_path_matcher { } } +pub fn append_buffer_to_output( + buffer: &BufferSnapshot, + path: Option<&Path>, + output: &mut SlashCommandOutput, +) -> Result<()> { + let prev_len = output.text.len(); + + let mut content = buffer.text(); + LineEnding::normalize(&mut content); + output.text.push_str(&codeblock_fence_for_path(path, None)); + output.text.push_str(&content); + if !output.text.ends_with('\n') { + output.text.push('\n'); + } + output.text.push_str("```"); + output.text.push('\n'); + + let section_ix = output.sections.len(); + collect_buffer_diagnostics(output, buffer, false); + + output.sections.insert( + 
section_ix, + build_entry_output_section(prev_len..output.text.len(), path, false, None), + ); + + output.text.push('\n'); + + Ok(()) +} + #[cfg(test)] mod test { use fs::FakeFs; @@ -591,9 +571,9 @@ mod test { .await .unwrap(); - assert!(result_1.completion_text.starts_with("root/dir")); + assert!(result_1.text.starts_with("root/dir")); // 4 files + 2 directories - assert_eq!(6, result_1.files.len()); + assert_eq!(result_1.sections.len(), 6); let result_2 = cx .update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx)) @@ -607,9 +587,9 @@ mod test { .await .unwrap(); - assert!(result.completion_text.starts_with("root/dir")); + assert!(result.text.starts_with("root/dir")); // 5 files + 2 directories - assert_eq!(7, result.files.len()); + assert_eq!(result.sections.len(), 7); // Ensure that the project lasts until after the last await drop(project); @@ -654,36 +634,27 @@ mod test { .unwrap(); // Sanity check - assert!(result.completion_text.starts_with("zed/assets/themes\n")); - assert_eq!(7, result.files.len()); + assert!(result.text.starts_with("zed/assets/themes\n")); + assert_eq!(result.sections.len(), 7); // Ensure that full file paths are included in the real output - assert!(result - .completion_text - .contains("zed/assets/themes/andromeda/LICENSE")); - assert!(result - .completion_text - .contains("zed/assets/themes/ayu/LICENSE")); - assert!(result - .completion_text - .contains("zed/assets/themes/summercamp/LICENSE")); - - assert_eq!("summercamp", result.files[5].path.to_string_lossy()); + assert!(result.text.contains("zed/assets/themes/andromeda/LICENSE")); + assert!(result.text.contains("zed/assets/themes/ayu/LICENSE")); + assert!(result.text.contains("zed/assets/themes/summercamp/LICENSE")); + + assert_eq!(result.sections[5].label, "summercamp"); // Ensure that things are in descending order, with properly relativized paths assert_eq!( - "zed/assets/themes/andromeda/LICENSE", - result.files[0].path.to_string_lossy() - ); - 
assert_eq!("andromeda", result.files[1].path.to_string_lossy()); - assert_eq!( - "zed/assets/themes/ayu/LICENSE", - result.files[2].path.to_string_lossy() + result.sections[0].label, + "zed/assets/themes/andromeda/LICENSE" ); - assert_eq!("ayu", result.files[3].path.to_string_lossy()); + assert_eq!(result.sections[1].label, "andromeda"); + assert_eq!(result.sections[2].label, "zed/assets/themes/ayu/LICENSE"); + assert_eq!(result.sections[3].label, "ayu"); assert_eq!( - "zed/assets/themes/summercamp/LICENSE", - result.files[4].path.to_string_lossy() + result.sections[4].label, + "zed/assets/themes/summercamp/LICENSE" ); // Ensure that the project lasts until after the last await @@ -723,27 +694,24 @@ mod test { .await .unwrap(); - assert!(result.completion_text.starts_with("zed/assets/themes\n")); - assert_eq!( - "zed/assets/themes/LICENSE", - result.files[0].path.to_string_lossy() - ); + assert!(result.text.starts_with("zed/assets/themes\n")); + assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE"); assert_eq!( - "zed/assets/themes/summercamp/LICENSE", - result.files[1].path.to_string_lossy() + result.sections[1].label, + "zed/assets/themes/summercamp/LICENSE" ); assert_eq!( - "zed/assets/themes/summercamp/subdir/LICENSE", - result.files[2].path.to_string_lossy() + result.sections[2].label, + "zed/assets/themes/summercamp/subdir/LICENSE" ); assert_eq!( - "zed/assets/themes/summercamp/subdir/subsubdir/LICENSE", - result.files[3].path.to_string_lossy() + result.sections[3].label, + "zed/assets/themes/summercamp/subdir/subsubdir/LICENSE" ); - assert_eq!("subsubdir", result.files[4].path.to_string_lossy()); - assert_eq!("subdir", result.files[5].path.to_string_lossy()); - assert_eq!("summercamp", result.files[6].path.to_string_lossy()); - assert_eq!("zed/assets/themes", result.files[7].path.to_string_lossy()); + assert_eq!(result.sections[4].label, "subsubdir"); + assert_eq!(result.sections[5].label, "subdir"); + assert_eq!(result.sections[6].label, 
"summercamp"); + assert_eq!(result.sections[7].label, "zed/assets/themes"); // Ensure that the project lasts until after the last await drop(project); diff --git a/crates/assistant/src/slash_command/now_command.rs b/crates/assistant/src/slash_command/now_command.rs index eb6277a7d92561..eb0ca926f015b6 100644 --- a/crates/assistant/src/slash_command/now_command.rs +++ b/crates/assistant/src/slash_command/now_command.rs @@ -7,7 +7,7 @@ use assistant_slash_command::{ }; use chrono::Local; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use workspace::Workspace; @@ -43,6 +43,8 @@ impl SlashCommand for NowSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, _cx: &mut WindowContext, @@ -57,6 +59,7 @@ impl SlashCommand for NowSlashCommand { range, icon: IconName::CountdownTimer, label: now.to_rfc2822().into(), + metadata: None, }], run_commands_in_text: false, })) diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 8182734e7214f8..3e8596d942bf61 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -3,7 +3,7 @@ use anyhow::{anyhow, Context, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use fs::Fs; use gpui::{AppContext, Model, Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use project::{Project, ProjectPath}; use std::{ fmt::Write, @@ -118,6 +118,8 @@ impl SlashCommand for ProjectSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: 
&mut WindowContext, @@ -140,6 +142,7 @@ impl SlashCommand for ProjectSlashCommand { range, icon: IconName::FileTree, label: "Project".into(), + metadata: None, }], run_commands_in_text: false, }) diff --git a/crates/assistant/src/slash_command/prompt_command.rs b/crates/assistant/src/slash_command/prompt_command.rs index 4d64bba2edb740..effbcc0f90ce87 100644 --- a/crates/assistant/src/slash_command/prompt_command.rs +++ b/crates/assistant/src/slash_command/prompt_command.rs @@ -3,7 +3,7 @@ use crate::prompt_library::PromptStore; use anyhow::{anyhow, Context, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use std::sync::{atomic::AtomicBool, Arc}; use ui::prelude::*; use workspace::Workspace; @@ -56,6 +56,8 @@ impl SlashCommand for PromptSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -95,6 +97,7 @@ impl SlashCommand for PromptSlashCommand { range, icon: IconName::Library, label: title, + metadata: None, }], run_commands_in_text: true, }) diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index 3a513ed9ad3784..72d86ec5c54000 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -60,6 +60,8 @@ impl SlashCommand for SearchSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: language::BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -168,6 +170,7 @@ impl SlashCommand for SearchSlashCommand { range: 0..text.len(), icon: IconName::MagnifyingGlass, label: query, + 
metadata: None, }); SlashCommandOutput { diff --git a/crates/assistant/src/slash_command/symbols_command.rs b/crates/assistant/src/slash_command/symbols_command.rs index c9582f2882472d..1cf8536c0dbfe7 100644 --- a/crates/assistant/src/slash_command/symbols_command.rs +++ b/crates/assistant/src/slash_command/symbols_command.rs @@ -3,7 +3,7 @@ use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use editor::Editor; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use std::sync::Arc; use std::{path::Path, sync::atomic::AtomicBool}; use ui::{IconName, WindowContext}; @@ -41,6 +41,8 @@ impl SlashCommand for OutlineSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -77,6 +79,7 @@ impl SlashCommand for OutlineSlashCommand { range: 0..outline_text.len(), icon: IconName::ListTree, label: path.to_string_lossy().to_string().into(), + metadata: None, }], text: outline_text, run_commands_in_text: false, diff --git a/crates/assistant/src/slash_command/tab_command.rs b/crates/assistant/src/slash_command/tab_command.rs index 1a6884b8538221..bdf8450d43be81 100644 --- a/crates/assistant/src/slash_command/tab_command.rs +++ b/crates/assistant/src/slash_command/tab_command.rs @@ -1,21 +1,17 @@ -use super::{ - diagnostics_command::write_single_file_diagnostics, - file_command::{build_entry_output_section, codeblock_fence_for_path}, - SlashCommand, SlashCommandOutput, -}; +use super::{file_command::append_buffer_to_output, SlashCommand, SlashCommandOutput}; use anyhow::{Context, Result}; -use assistant_slash_command::ArgumentCompletion; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use collections::{HashMap, HashSet}; use editor::Editor; use 
futures::future::join_all; use gpui::{Entity, Task, WeakView}; use language::{BufferSnapshot, CodeLabel, HighlightId, LspAdapterDelegate}; use std::{ - fmt::Write, path::PathBuf, sync::{atomic::AtomicBool, Arc}, }; use ui::{ActiveTheme, WindowContext}; +use util::ResultExt; use workspace::Workspace; pub(crate) struct TabSlashCommand; @@ -131,6 +127,8 @@ impl SlashCommand for TabSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -144,40 +142,11 @@ impl SlashCommand for TabSlashCommand { ); cx.background_executor().spawn(async move { - let mut sections = Vec::new(); - let mut text = String::new(); - let mut has_diagnostics = false; + let mut output = SlashCommandOutput::default(); for (full_path, buffer, _) in tab_items_search.await? { - let section_start_ix = text.len(); - text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None)); - for chunk in buffer.as_rope().chunks() { - text.push_str(chunk); - } - if !text.ends_with('\n') { - text.push('\n'); - } - writeln!(text, "```").unwrap(); - if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) { - has_diagnostics = true; - } - if !text.ends_with('\n') { - text.push('\n'); - } - - let section_end_ix = text.len() - 1; - sections.push(build_entry_output_section( - section_start_ix..section_end_ix, - full_path.as_deref(), - false, - None, - )); + append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err(); } - - Ok(SlashCommandOutput { - text, - sections, - run_commands_in_text: has_diagnostics, - }) + Ok(output) }) } } diff --git a/crates/assistant/src/slash_command/terminal_command.rs b/crates/assistant/src/slash_command/terminal_command.rs index 04baabd39669bc..1d0293c235d442 100644 --- a/crates/assistant/src/slash_command/terminal_command.rs +++ 
b/crates/assistant/src/slash_command/terminal_command.rs @@ -6,7 +6,7 @@ use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, }; use gpui::{AppContext, Task, View, WeakView}; -use language::{CodeLabel, LspAdapterDelegate}; +use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate}; use terminal_view::{terminal_panel::TerminalPanel, TerminalView}; use ui::prelude::*; use workspace::{dock::Panel, Workspace}; @@ -57,6 +57,8 @@ impl SlashCommand for TerminalSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -91,6 +93,7 @@ impl SlashCommand for TerminalSlashCommand { range, icon: IconName::Terminal, label: "Terminal".into(), + metadata: None, }], run_commands_in_text: false, })) diff --git a/crates/assistant/src/slash_command/workflow_command.rs b/crates/assistant/src/slash_command/workflow_command.rs index f588fe848d5b32..c66dd9bebff144 100644 --- a/crates/assistant/src/slash_command/workflow_command.rs +++ b/crates/assistant/src/slash_command/workflow_command.rs @@ -8,7 +8,7 @@ use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, }; use gpui::{Task, WeakView}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use workspace::Workspace; @@ -53,6 +53,8 @@ impl SlashCommand for WorkflowSlashCommand { fn run( self: Arc, _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, @@ -68,6 +70,7 @@ impl SlashCommand for WorkflowSlashCommand { range, icon: IconName::Route, label: "Workflow".into(), + metadata: None, }], run_commands_in_text: false, }) diff --git 
a/crates/assistant_slash_command/Cargo.toml b/crates/assistant_slash_command/Cargo.toml index 3d764bb0be9d82..a58a84312fc3e2 100644 --- a/crates/assistant_slash_command/Cargo.toml +++ b/crates/assistant_slash_command/Cargo.toml @@ -19,4 +19,5 @@ gpui.workspace = true language.workspace = true parking_lot.workspace = true serde.workspace = true +serde_json.workspace = true workspace.workspace = true diff --git a/crates/assistant_slash_command/src/assistant_slash_command.rs b/crates/assistant_slash_command/src/assistant_slash_command.rs index c5dece11ca59a8..36e229d49a246d 100644 --- a/crates/assistant_slash_command/src/assistant_slash_command.rs +++ b/crates/assistant_slash_command/src/assistant_slash_command.rs @@ -2,7 +2,7 @@ mod slash_command_registry; use anyhow::Result; use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext}; -use language::{CodeLabel, LspAdapterDelegate}; +use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt}; use serde::{Deserialize, Serialize}; pub use slash_command_registry::*; use std::{ @@ -77,6 +77,8 @@ pub trait SlashCommand: 'static + Send + Sync { fn run( self: Arc, arguments: &[String], + context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: BufferSnapshot, workspace: WeakView, // TODO: We're just using the `LspAdapterDelegate` here because that is // what the extension API is already expecting. 
@@ -94,7 +96,7 @@ pub type RenderFoldPlaceholder = Arc< + Fn(ElementId, Arc, &mut WindowContext) -> AnyElement, >; -#[derive(Debug, Default)] +#[derive(Debug, Default, PartialEq)] pub struct SlashCommandOutput { pub text: String, pub sections: Vec>, @@ -106,4 +108,11 @@ pub struct SlashCommandOutputSection { pub range: Range, pub icon: IconName, pub label: SharedString, + pub metadata: Option, +} + +impl SlashCommandOutputSection { + pub fn is_valid(&self, buffer: &language::TextBuffer) -> bool { + self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty() + } } diff --git a/crates/extension/src/extension_slash_command.rs b/crates/extension/src/extension_slash_command.rs index 60b027ef9d0d27..3dfbc4c03d9bb0 100644 --- a/crates/extension/src/extension_slash_command.rs +++ b/crates/extension/src/extension_slash_command.rs @@ -6,7 +6,7 @@ use assistant_slash_command::{ }; use futures::FutureExt; use gpui::{Task, WeakView, WindowContext}; -use language::LspAdapterDelegate; +use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; use wasmtime_wasi::WasiView; use workspace::Workspace; @@ -82,6 +82,8 @@ impl SlashCommand for ExtensionSlashCommand { fn run( self: Arc, arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, _workspace: WeakView, delegate: Option>, cx: &mut WindowContext, @@ -121,6 +123,7 @@ impl SlashCommand for ExtensionSlashCommand { range: section.range.into(), icon: IconName::Code, label: section.label.into(), + metadata: None, }) .collect(), run_commands_in_text: false, diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index a10b3798a42532..77942c8a94d542 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -2390,6 +2390,7 @@ message SlashCommandOutputSection { AnchorRange range = 1; string icon_name = 2; string label = 3; + optional string metadata = 4; } message ContextOperation { From 
ddaee2e8dd507d5eab009e789e57c0d4fba6c5bf Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 17:03:10 +0200 Subject: [PATCH 132/762] ssh: Handle BufferSaved ssh message (#17936) Release Notes: - N/A Co-authored-by: Conrad --- crates/project/src/project.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 14524180ab556d..5d9ac5e8214efb 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -797,9 +797,8 @@ impl Project { ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.settings_observer); client.add_model_message_handler(Self::handle_update_worktree); client.add_model_message_handler(Self::handle_create_buffer_for_peer); - client.add_model_message_handler(BufferStore::handle_update_buffer_file); - client.add_model_message_handler(BufferStore::handle_update_diff_base); client.add_model_request_handler(BufferStore::handle_update_buffer); + BufferStore::init(&client); LspStore::init(&client); SettingsObserver::init(&client); From ecd183079371d126e2740b4e5b838ce92cd45591 Mon Sep 17 00:00:00 2001 From: Erick Guan <297343+erickguan@users.noreply.github.com> Date: Tue, 17 Sep 2024 17:19:07 +0200 Subject: [PATCH 133/762] Fix opening file with colon (#17281) Closes #14100 Release Notes: - Fixed unable to open file with a colon from Zed CLI ----- I didn't make change to tests for the first two commits. I changed them to easily find offending test cases. Behavior changes are in last commit message. In the last commit, I changed how `PathWithPosition` should intreprete file paths. If my assumptions are off, please advise so that I can make another approach. I also believe further constraints would be better for `PathWithPosition`'s intention. But people can make future improvements to `PathWithPosition`. 
--- crates/util/src/paths.rs | 500 +++++++++++++++++++++++---------------- 1 file changed, 301 insertions(+), 199 deletions(-) diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index cd5beedf47b2cb..f4ecfefc52a872 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -98,10 +98,6 @@ impl> PathExt for T { /// A delimiter to use in `path_query:row_number:column_number` strings parsing. pub const FILE_ROW_COLUMN_DELIMITER: char = ':'; -/// Extracts filename and row-column suffixes. -/// Parenthesis format is used by [MSBuild](https://learn.microsoft.com/en-us/visualstudio/msbuild/msbuild-diagnostic-format-for-tasks) compatible tools -// NOTE: All cases need to have exactly three capture groups for extract(): file_name, row and column. -// Valid patterns that don't contain row and/or column should have empty groups in their place. const ROW_COL_CAPTURE_REGEX: &str = r"(?x) ([^\(]+)(?: \((\d+),(\d+)\) # filename(row,column) @@ -109,12 +105,12 @@ const ROW_COL_CAPTURE_REGEX: &str = r"(?x) \((\d+)\)() # filename(row) ) | - ([^\:]+)(?: - \:(\d+)\:(\d+) # filename:row:column + (.+?)(?: + \:+(\d+)\:(\d+)\:*$ # filename:row:column | - \:(\d+)() # filename:row + \:+(\d+)\:*()$ # filename:row | - \:()() # filename: + \:*()()$ # filename: )"; /// A representation of a path-like string with optional row and column numbers. @@ -136,9 +132,92 @@ impl PathWithPosition { column: None, } } + /// Parses a string that possibly has `:row:column` or `(row, column)` suffix. + /// Parenthesis format is used by [MSBuild](https://learn.microsoft.com/en-us/visualstudio/msbuild/msbuild-diagnostic-format-for-tasks) compatible tools /// Ignores trailing `:`s, so `test.rs:22:` is parsed as `test.rs:22`. /// If the suffix parsing fails, the whole string is parsed as a path. + /// + /// Be mindful that `test_file:10:1:` is a valid posix filename. + /// `PathWithPosition` class assumes that the ending position-like suffix is **not** part of the filename. 
+ /// + /// # Examples + /// + /// ``` + /// # use util::paths::PathWithPosition; + /// # use std::path::PathBuf; + /// assert_eq!(PathWithPosition::parse_str("test_file"), PathWithPosition { + /// path: PathBuf::from("test_file"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file:10"), PathWithPosition { + /// path: PathBuf::from("test_file"), + /// row: Some(10), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1:2"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: Some(2), + /// }); + /// ``` + /// + /// # Expected parsing results when encounter ill-formatted inputs. 
+ /// ``` + /// # use util::paths::PathWithPosition; + /// # use std::path::PathBuf; + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:a"), PathWithPosition { + /// path: PathBuf::from("test_file.rs:a"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:a:b"), PathWithPosition { + /// path: PathBuf::from("test_file.rs:a:b"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs::"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: None, + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs::1"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1::"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs::1:2"), PathWithPosition { + /// path: PathBuf::from("test_file.rs"), + /// row: Some(1), + /// column: Some(2), + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1::2"), PathWithPosition { + /// path: PathBuf::from("test_file.rs:1"), + /// row: Some(2), + /// column: None, + /// }); + /// assert_eq!(PathWithPosition::parse_str("test_file.rs:1:2:3"), PathWithPosition { + /// path: PathBuf::from("test_file.rs:1"), + /// row: Some(2), + /// column: Some(3), + /// }); + /// ``` pub fn parse_str(s: &str) -> Self { let trimmed = s.trim(); let path = Path::new(trimmed); @@ -359,206 +438,229 @@ mod tests { } #[test] - fn path_with_position_parsing_positive() { - let input_and_expected = [ - ( - "test_file.rs", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: None, - column: None, - }, - ), - ( - "test_file.rs:1", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: Some(1), - column: None, - }, - ), - ( - 
"test_file.rs:1:2", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: Some(1), - column: Some(2), - }, - ), - ]; + fn path_with_position_parse_posix_path() { + // Test POSIX filename edge cases + // Read more at https://en.wikipedia.org/wiki/Filename + assert_eq!( + PathWithPosition::parse_str(" test_file"), + PathWithPosition { + path: PathBuf::from("test_file"), + row: None, + column: None + } + ); - for (input, expected) in input_and_expected { - let actual = PathWithPosition::parse_str(input); - assert_eq!( - actual, expected, - "For positive case input str '{input}', got a parse mismatch" - ); - } + assert_eq!( + PathWithPosition::parse_str("a:bc:.zip:1"), + PathWithPosition { + path: PathBuf::from("a:bc:.zip"), + row: Some(1), + column: None + } + ); + + assert_eq!( + PathWithPosition::parse_str("one.second.zip:1"), + PathWithPosition { + path: PathBuf::from("one.second.zip"), + row: Some(1), + column: None + } + ); + + // Trim off trailing `:`s for otherwise valid input. 
+ assert_eq!( + PathWithPosition::parse_str("test_file:10:1:"), + PathWithPosition { + path: PathBuf::from("test_file"), + row: Some(10), + column: Some(1) + } + ); + + assert_eq!( + PathWithPosition::parse_str("test_file.rs:"), + PathWithPosition { + path: PathBuf::from("test_file.rs"), + row: None, + column: None + } + ); + + assert_eq!( + PathWithPosition::parse_str("test_file.rs:1:"), + PathWithPosition { + path: PathBuf::from("test_file.rs"), + row: Some(1), + column: None + } + ); } #[test] - fn path_with_position_parsing_negative() { - for (input, row, column) in [ - ("test_file.rs:a", None, None), - ("test_file.rs:a:b", None, None), - ("test_file.rs::", None, None), - ("test_file.rs::1", None, None), - ("test_file.rs:1::", Some(1), None), - ("test_file.rs::1:2", None, None), - ("test_file.rs:1::2", Some(1), None), - ("test_file.rs:1:2:3", Some(1), Some(2)), - ] { - let actual = PathWithPosition::parse_str(input); - assert_eq!( - actual, - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row, - column, - }, - "For negative case input str '{input}', got a parse mismatch" - ); - } + #[cfg(not(target_os = "windows"))] + fn path_with_position_parse_posix_path_with_suffix() { + assert_eq!( + PathWithPosition::parse_str("app-editors:zed-0.143.6:20240710-201212.log:34:"), + PathWithPosition { + path: PathBuf::from("app-editors:zed-0.143.6:20240710-201212.log"), + row: Some(34), + column: None, + } + ); + + assert_eq!( + PathWithPosition::parse_str("crates/file_finder/src/file_finder.rs:1902:13:"), + PathWithPosition { + path: PathBuf::from("crates/file_finder/src/file_finder.rs"), + row: Some(1902), + column: Some(13), + } + ); + + assert_eq!( + PathWithPosition::parse_str("crate/utils/src/test:today.log:34"), + PathWithPosition { + path: PathBuf::from("crate/utils/src/test:today.log"), + row: Some(34), + column: None, + } + ); } - // Trim off trailing `:`s for otherwise valid input. 
#[test] - fn path_with_position_parsing_special() { - #[cfg(not(target_os = "windows"))] - let input_and_expected = [ - ( - "test_file.rs:", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: None, - column: None, - }, - ), - ( - "test_file.rs:1:", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: Some(1), - column: None, - }, - ), - ( - "crates/file_finder/src/file_finder.rs:1902:13:", - PathWithPosition { - path: PathBuf::from("crates/file_finder/src/file_finder.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ]; + #[cfg(target_os = "windows")] + fn path_with_position_parse_windows_path() { + assert_eq!( + PathWithPosition::parse_str("crates\\utils\\paths.rs"), + PathWithPosition { + path: PathBuf::from("crates\\utils\\paths.rs"), + row: None, + column: None + } + ); - #[cfg(target_os = "windows")] - let input_and_expected = [ - ( - "test_file.rs:", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: None, - column: None, - }, - ), - ( - "test_file.rs:1:", - PathWithPosition { - path: PathBuf::from("test_file.rs"), - row: Some(1), - column: None, - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs:1902:13:", - PathWithPosition { - path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs:1902:13:15:", - PathWithPosition { - path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs:1902:::15:", - PathWithPosition { - path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: None, - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs(1902,13):", - PathWithPosition { - path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "\\\\?\\C:\\Users\\someone\\test_file.rs(1902):", - PathWithPosition { - path: 
PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: None, - }, - ), - ( - "C:\\Users\\someone\\test_file.rs:1902:13:", - PathWithPosition { - path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "crates/utils/paths.rs", - PathWithPosition { - path: PathBuf::from("crates\\utils\\paths.rs"), - row: None, - column: None, - }, - ), - ( - "C:\\Users\\someone\\test_file.rs(1902,13):", - PathWithPosition { - path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: Some(13), - }, - ), - ( - "C:\\Users\\someone\\test_file.rs(1902):", - PathWithPosition { - path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), - row: Some(1902), - column: None, - }, - ), - ( - "crates/utils/paths.rs:101", - PathWithPosition { - path: PathBuf::from("crates\\utils\\paths.rs"), - row: Some(101), - column: None, - }, - ), - ]; + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: None, + column: None + } + ); + } - for (input, expected) in input_and_expected { - let actual = PathWithPosition::parse_str(input); - assert_eq!( - actual, expected, - "For special case input str '{input}', got a parse mismatch" - ); - } + #[test] + #[cfg(target_os = "windows")] + fn path_with_position_parse_windows_path_with_suffix() { + assert_eq!( + PathWithPosition::parse_str("crates\\utils\\paths.rs:101"), + PathWithPosition { + path: PathBuf::from("crates\\utils\\paths.rs"), + row: Some(101), + column: None + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs:1:20"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), + row: Some(1), + column: Some(20) + } + ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs(1902,13)"), + PathWithPosition { + path: 
PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13) + } + ); + + // Trim off trailing `:`s for otherwise valid input. + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs:1902:13:"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13) + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs:1902:13:15:"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs:1902"), + row: Some(13), + column: Some(15) + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs:1902:::15:"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs:1902"), + row: Some(15), + column: None + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs(1902,13):"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13), + } + ); + + assert_eq!( + PathWithPosition::parse_str("\\\\?\\C:\\Users\\someone\\test_file.rs(1902):"), + PathWithPosition { + path: PathBuf::from("\\\\?\\C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: None, + } + ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs:1902:13:"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13), + } + ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs(1902,13):"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: Some(13), + } + ); + + assert_eq!( + PathWithPosition::parse_str("C:\\Users\\someone\\test_file.rs(1902):"), + PathWithPosition { + path: PathBuf::from("C:\\Users\\someone\\test_file.rs"), + row: Some(1902), + column: None, + } + ); + + assert_eq!( + 
PathWithPosition::parse_str("crates/utils/paths.rs:101"), + PathWithPosition { + path: PathBuf::from("crates\\utils\\paths.rs"), + row: Some(101), + column: None, + } + ); } #[test] From 469dfe759c7489ffb1352a74689cb19cae51a84f Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 17:21:20 +0200 Subject: [PATCH 134/762] ssh: Handle `~` in ssh filenames (#17939) This allows users to open `ssh://user@host/~/my-home-dir-folder`. Release Notes: - N/A Co-authored-by: Conrad --- crates/project/src/worktree_store.rs | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 8b620de43e0838..07764d4a05ce71 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -182,14 +182,23 @@ impl WorktreeStore { visible: bool, cx: &mut ModelContext, ) -> Task, Arc>> { - let abs_path = abs_path.as_ref(); - let root_name = abs_path.file_name().unwrap().to_string_lossy().to_string(); - let path = abs_path.to_string_lossy().to_string(); + let mut abs_path = abs_path.as_ref().to_string_lossy().to_string(); + // If we start with `/~` that means the ssh path was something like `ssh://user@host/~/home-dir-folder/` + // in which case want to strip the leading the `/` and expand the tilde. 
+ // That's what git does too: https://github.com/libgit2/libgit2/issues/3345#issuecomment-127050850 + if abs_path.starts_with("/~") { + abs_path = shellexpand::tilde(&abs_path[1..]).to_string(); + } + let root_name = PathBuf::from(abs_path.clone()) + .file_name() + .unwrap() + .to_string_lossy() + .to_string(); cx.spawn(|this, mut cx| async move { let response = client .request(proto::AddWorktree { project_id: SSH_PROJECT_ID, - path: path.clone(), + path: abs_path.clone(), }) .await?; let worktree = cx.update(|cx| { @@ -200,7 +209,7 @@ impl WorktreeStore { id: response.worktree_id, root_name, visible, - abs_path: path, + abs_path, }, client, cx, From 10cfaecffaa8c3705085d9faab714121a78e6f3d Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Tue, 17 Sep 2024 18:28:03 +0300 Subject: [PATCH 135/762] ocaml: Add Dune language support (#17886) This uses my fork of the dune tree-sitter grammar to include the generated files Release Notes: - N/A --- extensions/ocaml/extension.toml | 4 ++++ extensions/ocaml/languages/dune/config.toml | 8 ++++++++ extensions/ocaml/languages/dune/highlights.scm | 5 +++++ extensions/ocaml/languages/dune/injections.scm | 2 ++ 4 files changed, 19 insertions(+) create mode 100644 extensions/ocaml/languages/dune/config.toml create mode 100644 extensions/ocaml/languages/dune/highlights.scm create mode 100644 extensions/ocaml/languages/dune/injections.scm diff --git a/extensions/ocaml/extension.toml b/extensions/ocaml/extension.toml index 99f83d126307c2..0523ba83b6b8e8 100644 --- a/extensions/ocaml/extension.toml +++ b/extensions/ocaml/extension.toml @@ -19,3 +19,7 @@ path = "grammars/ocaml" repository = "https://github.com/tree-sitter/tree-sitter-ocaml" commit = "0b12614ded3ec7ed7ab7933a9ba4f695ba4c342e" path = "grammars/interface" + +[grammars.dune] +repository = "https://github.com/WHForks/tree-sitter-dune" +commit = "b3f7882e1b9a1d8811011bf6f0de1c74c9c93949" diff --git 
a/extensions/ocaml/languages/dune/config.toml b/extensions/ocaml/languages/dune/config.toml new file mode 100644 index 00000000000000..332e3f112ea5db --- /dev/null +++ b/extensions/ocaml/languages/dune/config.toml @@ -0,0 +1,8 @@ +name = "Dune" +grammar = "dune" +path_suffixes = ["dune", "dune-project"] +brackets = [ + { start = "(", end = ")", close = true, newline = true }, + { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] } +] +tab_size = 2 diff --git a/extensions/ocaml/languages/dune/highlights.scm b/extensions/ocaml/languages/dune/highlights.scm new file mode 100644 index 00000000000000..e7a21cd2c51c03 --- /dev/null +++ b/extensions/ocaml/languages/dune/highlights.scm @@ -0,0 +1,5 @@ +(stanza_name) @function +(field_name) @property +(quoted_string) @string +(multiline_string) @string +(action_name) @keyword diff --git a/extensions/ocaml/languages/dune/injections.scm b/extensions/ocaml/languages/dune/injections.scm new file mode 100644 index 00000000000000..441998290e76df --- /dev/null +++ b/extensions/ocaml/languages/dune/injections.scm @@ -0,0 +1,2 @@ +((ocaml_syntax) @injection.content + (#set! injection.language "ocaml")) From 7c54965b11c096341aeee64a0ed8079890c307d1 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 17 Sep 2024 17:39:06 +0200 Subject: [PATCH 136/762] docs: Add dark mode (#17940) Closes https://github.com/zed-industries/zed/issues/17911 This PR enables dark mode on the documentation. This is done without any special plugins, just pure JavaScript and CSS variables. I may open fast-follow PRs to fine-tune design and code details that haven't been super polished yet. For example, when switching to dark mode, the `class` attribute on the `html` tag would change immediately, whereas other attributes such as `data-theme` and `data-color-scheme` would require a full refresh. This seems to be resolved, but not 100% confident yet. 
--- Release Notes: - Enabled dark mode on the documentation --- docs/theme/css/chrome.css | 47 +++++++++------- docs/theme/css/general.css | 14 ++--- docs/theme/css/variables.css | 105 ++++++++++++++++++++++++++++++++--- docs/theme/index.hbs | 38 ++++++++++--- docs/theme/page-toc.css | 2 +- docs/theme/plugins.css | 2 +- docs/theme/plugins.js | 62 +++++++++++++++++++++ 7 files changed, 224 insertions(+), 46 deletions(-) diff --git a/docs/theme/css/chrome.css b/docs/theme/css/chrome.css index 7afeabc704ba36..11f16848d70eb4 100644 --- a/docs/theme/css/chrome.css +++ b/docs/theme/css/chrome.css @@ -3,7 +3,7 @@ @import "variables.css"; html { - background-color: rgb(246, 245, 240); + background-color: var(--bg); scrollbar-color: var(--scrollbar) var(--bg); } #searchresults a, @@ -58,7 +58,7 @@ a > .hljs { height: var(--menu-bar-height); } #menu-bar.bordered { - border-block-end-color: var(--table-border-color); + border-block-end-color: var(--divider); } #menu-bar i, #menu-bar .icon-button { @@ -73,7 +73,7 @@ a > .hljs { transition: color 0.5s; } #menu-bar .icon-button:hover { - background-color: hsl(219, 93%, 42%, 0.15); + background-color: var(--icon-btn-bg-hover); } @media only screen and (max-width: 420px) { @@ -116,6 +116,7 @@ a > .hljs { align-items: center; flex: 1; overflow: hidden; + filter: var(--logo-brightness); } .js .menu-title { cursor: pointer; @@ -249,9 +250,10 @@ a:hover > .hljs { } pre { - background-color: white; - border: 1px rgba(8, 76, 207, 0.3) solid; - box-shadow: rgba(8, 76, 207, 0.07) 4px 4px 0px 0px; + background-color: var(--pre-bg); + border: 1px solid; + border-color: var(--pre-border); + box-shadow: var(--pre-shadow) 4px 4px 0px 0px; position: relative; } pre > .hljs { @@ -445,7 +447,8 @@ ul#searchresults span.teaser em { overscroll-behavior-y: contain; background-color: var(--sidebar-bg); color: var(--sidebar-fg); - border-right: 1px solid hsl(219, 93%, 42%, 0.15); + border-right: 1px solid; + border-color: var(--divider); } [dir="rtl"] 
.sidebar { left: unset; @@ -606,7 +609,7 @@ ul#searchresults span.teaser em { margin: 5px 0px; } .chapter .spacer { - background-color: var(--sidebar-spacer); + background-color: var(--divider); } @media (-moz-touch-enabled: 1), (pointer: coarse) { @@ -628,11 +631,11 @@ ul#searchresults span.teaser em { .theme-popup { position: absolute; - left: 10px; - top: var(--menu-bar-height); + left: 32px; + top: calc(var(--menu-bar-height) - 12px); z-index: 1000; border-radius: 4px; - font-size: 0.7em; + font-size: 1.4rem; color: var(--fg); background: var(--theme-popup-bg); border: 1px solid var(--theme-popup-border); @@ -654,7 +657,7 @@ ul#searchresults span.teaser em { width: 100%; border: 0; margin: 0; - padding: 2px 20px; + padding: 2px 24px; line-height: 25px; white-space: nowrap; text-align: start; @@ -662,32 +665,36 @@ ul#searchresults span.teaser em { color: inherit; background: inherit; font-size: inherit; + font-family: inherit; } .theme-popup .theme:hover { background-color: var(--theme-hover); } .theme-selected::before { + font-family: Arial, Helvetica, sans-serif; + text-align: center; display: inline-block; content: "✓"; - margin-inline-start: -14px; - width: 14px; + margin-inline-start: -20px; + width: 20px; } .download-button { - background: hsl(220, 60%, 95%); - color: hsl(220, 60%, 30%); + background: var(--download-btn-bg); + color: var(--download-btn-color); padding: 4px 8px; - border: 1px solid hsla(220, 60%, 40%, 0.2); + border: 1px solid; + border-color: var(--download-btn-border); font-size: 1.4rem; border-radius: 4px; - box-shadow: hsla(220, 40%, 60%, 0.1) 0px -2px 0px 0px inset; + box-shadow: var(--download-btn-shadow) 0px -2px 0px 0px inset; transition: 100ms; transition-property: box-shadow, border-color, background-color; } .download-button:hover { - background: hsl(220, 60%, 93%); - border-color: hsla(220, 60%, 50%, 0.2); + background: var(--download-btn-bg); + border-color: var(--download-btn-border-hover); box-shadow: none; } diff --git 
a/docs/theme/css/general.css b/docs/theme/css/general.css index 9a20751f211e74..e6905b47bf485b 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -174,10 +174,10 @@ h6:target::before { } .content a { text-decoration: underline; - text-decoration-color: hsl(219, 93%, 42%, 0.2); + text-decoration-color: var(--link-line-decoration); } .content a:hover { - text-decoration-color: hsl(219, 93%, 42%, 0.5); + text-decoration-color: var(--link-line-decoration-hover); } .content img, .content video { @@ -219,7 +219,7 @@ table thead td { } table thead th { padding: 6px 12px; - color: #000; + color: var(--full-contrast); text-align: left; border: 1px var(--table-border-color) solid; } @@ -235,7 +235,7 @@ blockquote { margin: auto; margin-top: 1rem; padding: 1rem 1.25rem; - color: #000; + color: var(--full-contrast); background-color: var(--quote-bg); border: 1px solid var(--quote-border); } @@ -315,7 +315,7 @@ kbd { font-size: 1.4rem; margin: 0.5em 0; border-bottom: 1px solid; - border-color: var(--border-light); + border-color: var(--divider); } .footnote-definition p { display: inline; @@ -356,6 +356,6 @@ kbd { } code.hljs { - color: hsl(221, 13%, 10%) !important; - background-color: hsla(221, 93%, 42%, 0.1); + color: var(--code-text) !important; + background-color: var(--code-bg); } diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css index 481cb5a6c183c2..59f239826472a8 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -1,6 +1,10 @@ /* Globals */ :root { + --color-scheme: light; + + --logo-brightness: brightness(1); + --sidebar-width: 300px; --sidebar-resize-indicator-width: 0px; --sidebar-resize-indicator-space: 2px; @@ -24,18 +28,30 @@ --sidebar-fg: hsl(0, 0%, 0%); --sidebar-non-existant: #aaaaaa; - --sidebar-active: hsl(219, 93%, 42%); - --sidebar-active-bg: hsl(219, 93%, 42%, 0.1); - --sidebar-spacer: #f4f4f4; + --sidebar-active: hsl(220, 93%, 42%); + --sidebar-active-bg: hsl(220, 93%, 42%, 
0.1); + --divider: hsl(220, 93%, 42%, 0.15); --scrollbar: #8f8f8f; --icons: #747474; --icons-hover: #000000; + --icon-btn-bg-hover: hsl(220, 93%, 42%, 0.15); - --links: rgb(8, 76, 207); + --links: hsl(220, 92%, 42%); + --link-line-decoration: hsl(220, 93%, 42%, 0.2); + --link-line-decoration-hover: hsl(220, 93%, 42%, 0.5); + + --full-contrast: #000; --inline-code-color: #301900; + --code-text: hsl(220, 13%, 10%); + --code-bg: hsl(220, 93%, 42%, 0.1); + --keybinding-bg: hsl(0, 0%, 94%); + + --pre-bg: #fff; + --pre-border: hsla(220, 93%, 42%, 0.3); + --pre-shadow: hsla(220, 93%, 42%, 0.07); --theme-popup-bg: #fafafa; --theme-popup-border: #cccccc; @@ -48,9 +64,9 @@ --warning-bg: hsl(42, 100%, 60%, 0.1); --warning-icon: hsl(42, 100%, 30%); - --table-header-bg: hsl(219, 50%, 90%, 0.4); - --table-border-color: hsl(219, 93%, 42%, 0.15); - --table-alternate-bg: hsl(219, 10%, 90%, 0.4); + --table-header-bg: hsl(220, 50%, 90%, 0.4); + --table-border-color: hsl(220, 93%, 42%, 0.15); + --table-alternate-bg: hsl(220, 10%, 90%, 0.4); --searchbar-border-color: #aaa; --searchbar-bg: #fafafa; @@ -61,5 +77,78 @@ --searchresults-li-bg: #e4f2fe; --search-mark-bg: #a2cff5; - --color-scheme: light; + --download-btn-bg: hsl(220, 60%, 95%); + --download-btn-bg-hover: hsl(220, 60%, 93%); + --download-btn-color: hsl(220, 60%, 30%); + --download-btn-border: hsla(220, 60%, 40%, 0.2); + --download-btn-border-hover: hsla(220, 60%, 50%, 0.2); + --download-btn-shadow: hsla(220, 40%, 60%, 0.1); +} + +.dark { + --color-scheme: dark; + + --logo-brightness: brightness(2); + + --bg: hsl(220, 13%, 10%); + --fg: hsl(220, 14%, 70%); + --title-color: hsl(220, 92%, 80%); + + --border: hsl(220, 13%, 20%); + --border-light: hsl(220, 13%, 90%); + --border-hover: hsl(220, 13%, 40%); + + --sidebar-bg: hsl(220, 13%, 10%); + --sidebar-fg: hsl(220, 14%, 71%); + --sidebar-non-existant: #505254; + --sidebar-active: hsl(220, 92%, 75%); + --sidebar-active-bg: hsl(220, 93%, 42%, 0.25); + + --divider: hsl(220, 13%, 
20%); + --scrollbar: hsl(220, 13%, 30%); + + --icons: hsl(220, 14%, 71%); + --icons-hover: hsl(220, 14%, 90%); + --icon-btn-bg-hover: hsl(220, 93%, 42%, 0.4); + + --links: hsl(220, 93%, 70%); + --link-line-decoration: hsl(220, 92%, 80%, 0.4); + --link-line-decoration-hover: hsl(220, 92%, 80%, 0.8); + --full-contrast: #fff; + + --inline-code-color: hsl(40, 100%, 80%); + --code-text: hsl(220, 13%, 95%); + --code-bg: hsl(220, 93%, 50%, 0.2); + --keybinding-bg: hsl(0, 0%, 12%); + + --pre-bg: hsl(220, 13%, 5%); + --pre-border: hsla(220, 93%, 70%, 0.3); + --pre-shadow: hsla(220, 93%, 70%, 0.1); + + --theme-popup-bg: hsl(220, 13%, 15%); + --theme-popup-border: hsl(220, 13%, 20%); + --theme-hover: hsl(220, 13%, 25%); + + --quote-bg: hsl(220, 13%, 25%, 0.4); + --quote-border: hsl(220, 13%, 32%, 0.5); + + --table-border-color: hsl(220, 13%, 30%, 0.5); + --table-header-bg: hsl(220, 13%, 25%, 0.5); + --table-alternate-bg: hsl(220, 13%, 20%, 0.4); + + --searchbar-border-color: hsl(220, 13%, 30%); + --searchbar-bg: hsl(220, 13%, 22%, 0.5); + --searchbar-fg: hsl(220, 14%, 71%); + --searchbar-shadow-color: hsl(220, 13%, 15%); + --searchresults-header-fg: hsl(220, 14%, 60%); + --searchresults-border-color: hsl(220, 13%, 30%); + --searchresults-li-bg: hsl(220, 13%, 25%); + --search-mark-bg: hsl(220, 93%, 60%); + + --download-btn-bg: hsl(220, 90%, 90%, 0.1); + --download-btn-bg-hover: hsl(220, 90%, 50%, 0.2); + --download-btn-color: hsl(220, 90%, 95%); + --download-btn-border: hsla(220, 90%, 80%, 0.2); + --download-btn-border-hover: hsla(220, 90%, 80%, 0.4); + --download-btn-shadow: hsla(220, 50%, 60%, 0.15); } diff --git a/docs/theme/index.hbs b/docs/theme/index.hbs index 8976b54bd96014..c4154b46d33b77 100644 --- a/docs/theme/index.hbs +++ b/docs/theme/index.hbs @@ -1,5 +1,5 @@ - + @@ -56,13 +56,15 @@ var default_theme = window.matchMedia("(prefers-color-scheme: dark)").matches ? 
"{{ preferred_dark_theme }}" : "{{ default_theme }}"; - + + + diff --git a/docs/theme/page-toc.css b/docs/theme/page-toc.css index dacd61a09b685a..af9b2fbbe1202c 100644 --- a/docs/theme/page-toc.css +++ b/docs/theme/page-toc.css @@ -74,6 +74,6 @@ margin-bottom: 12px; padding-left: 12px; font-size: 1.4rem; - color: #000; + color: var(--full-contrast); } } diff --git a/docs/theme/plugins.css b/docs/theme/plugins.css index 9deee5d5baf9ff..9d5d09fe736a96 100644 --- a/docs/theme/plugins.css +++ b/docs/theme/plugins.css @@ -1,5 +1,5 @@ kbd.keybinding { - background-color: #f0f0f0; + background-color: var(--keybinding-bg); padding: 2px 4px; border-radius: 3px; font-family: monospace; diff --git a/docs/theme/plugins.js b/docs/theme/plugins.js index eee842f15a9f44..76a295353f7abc 100644 --- a/docs/theme/plugins.js +++ b/docs/theme/plugins.js @@ -48,3 +48,65 @@ console.log("Operating System:", os); // Start the process from the body walkDOM(document.body); })(); + +function darkModeToggle() { + var html = document.documentElement; + var themeToggleButton = document.getElementById("theme-toggle"); + var themePopup = document.getElementById("theme-list"); + var themePopupButtons = themePopup.querySelectorAll("button"); + + function setTheme(theme) { + html.setAttribute("data-theme", theme); + html.setAttribute("data-color-scheme", theme); + html.className = theme; + localStorage.setItem("mdbook-theme", theme); + + // Force a repaint to ensure the changes take effect in the client immediately + document.body.style.display = "none"; + document.body.offsetHeight; + document.body.style.display = ""; + } + + themeToggleButton.addEventListener("click", function (event) { + event.preventDefault(); + themePopup.style.display = + themePopup.style.display === "block" ? 
"none" : "block"; + }); + + themePopupButtons.forEach(function (button) { + button.addEventListener("click", function () { + setTheme(this.id); + themePopup.style.display = "none"; + }); + }); + + document.addEventListener("click", function (event) { + if ( + !themePopup.contains(event.target) && + !themeToggleButton.contains(event.target) + ) { + themePopup.style.display = "none"; + } + }); + + // Set initial theme + var currentTheme = localStorage.getItem("mdbook-theme"); + if (currentTheme) { + setTheme(currentTheme); + } else { + // If no theme is set, use the system's preference + var systemPreference = window.matchMedia("(prefers-color-scheme: dark)") + .matches + ? "dark" + : "light"; + setTheme(systemPreference); + } + + // Listen for system's preference changes + const darkModeMediaQuery = window.matchMedia("(prefers-color-scheme: dark)"); + darkModeMediaQuery.addEventListener("change", function (e) { + if (!localStorage.getItem("mdbook-theme")) { + setTheme(e.matches ? "dark" : "light"); + } + }); +} From d5003e1121045b1c8f505b3ff592630d2e277605 Mon Sep 17 00:00:00 2001 From: VacheDesNeiges <33199153+VacheDesNeiges@users.noreply.github.com> Date: Tue, 17 Sep 2024 17:40:43 +0200 Subject: [PATCH 137/762] Update C++ Tree-sitter queries (#17471) Closes #16443 Release Notes: - Fixed C++ functions being wrongly tagged as variables when called after two or more scope resolution operators. 
- Added a "namespace" tag for highlighting purposes Before : ![image](https://github.com/user-attachments/assets/743b8407-4e62-4549-9c6a-ed6608ea7e43) After : ![image](https://github.com/user-attachments/assets/de563621-e722-463c-97a1-a99b925f126e) --- crates/languages/src/cpp/highlights.scm | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/crates/languages/src/cpp/highlights.scm b/crates/languages/src/cpp/highlights.scm index 42d05bde33e0ab..4c9db569285e39 100644 --- a/crates/languages/src/cpp/highlights.scm +++ b/crates/languages/src/cpp/highlights.scm @@ -1,10 +1,33 @@ (identifier) @variable (field_identifier) @property +(namespace_identifier) @namespace (call_expression function: (qualified_identifier name: (identifier) @function)) +(call_expression + (qualified_identifier + (identifier) @function.call)) + +(call_expression + (qualified_identifier + (qualified_identifier + (identifier) @function.call))) + +(call_expression + (qualified_identifier + (qualified_identifier + (qualified_identifier + (identifier) @function.call)))) + +((qualified_identifier + (qualified_identifier + (qualified_identifier + (qualified_identifier + (identifier) @function.call)))) @_parent + (#has-ancestor? @_parent call_expression)) + (call_expression function: (identifier) @function) From e88b48a9c7467aa69a4f7d6dd657bbc1a294e32a Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 11:42:20 -0400 Subject: [PATCH 138/762] ocaml: Fix indentation in files (#17942) This PR fixes the indentation in the Dune-related files after https://github.com/zed-industries/zed/pull/17886. 
Release Notes: - N/A --- extensions/ocaml/languages/dune/config.toml | 4 ++-- extensions/ocaml/languages/dune/injections.scm | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/extensions/ocaml/languages/dune/config.toml b/extensions/ocaml/languages/dune/config.toml index 332e3f112ea5db..b4f79850b64336 100644 --- a/extensions/ocaml/languages/dune/config.toml +++ b/extensions/ocaml/languages/dune/config.toml @@ -2,7 +2,7 @@ name = "Dune" grammar = "dune" path_suffixes = ["dune", "dune-project"] brackets = [ - { start = "(", end = ")", close = true, newline = true }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] } + { start = "(", end = ")", close = true, newline = true }, + { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] } ] tab_size = 2 diff --git a/extensions/ocaml/languages/dune/injections.scm b/extensions/ocaml/languages/dune/injections.scm index 441998290e76df..654b5b2c13967a 100644 --- a/extensions/ocaml/languages/dune/injections.scm +++ b/extensions/ocaml/languages/dune/injections.scm @@ -1,2 +1,2 @@ ((ocaml_syntax) @injection.content - (#set! injection.language "ocaml")) + (#set! injection.language "ocaml")) From 27f09957c2066f0f4131f5daafb95085e3ce6dbb Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 11:53:52 -0400 Subject: [PATCH 139/762] Improve CSS highlighting for property_name (#17324) --- crates/languages/src/css/highlights.scm | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/css/highlights.scm b/crates/languages/src/css/highlights.scm index e271d8583c661b..4ddfe9a418ca70 100644 --- a/crates/languages/src/css/highlights.scm +++ b/crates/languages/src/css/highlights.scm @@ -35,10 +35,11 @@ (class_name) (id_name) (namespace_name) - (property_name) (feature_name) ] @property +(property_name) @constant + (function_name) @function ( @@ -75,4 +76,17 @@ [ "," ":" + "." 
+ "::" + ";" + "#" ] @punctuation.delimiter + +[ + "{" + ")" + "(" + "}" + "[" + "]" +] @punctuation.bracket From accff826ca842fc8c297fab317f464bc9c695d75 Mon Sep 17 00:00:00 2001 From: Albert Marashi Date: Wed, 18 Sep 2024 01:32:25 +0930 Subject: [PATCH 140/762] svelte: Migrate to `tree-sitter-grammars/tree-sitter-svelte` (#17529) > [!NOTE] > The https://github.com/tree-sitter-grammars/tree-sitter-svelte repository seems to be more well maintained, with higher quality code, and as per https://github.com/zed-extensions/svelte/issues/1 it was suggested that we swap to this repository for Svelte grammars - Closes https://github.com/zed-industries/zed/issues/17310 - Closes https://github.com/zed-industries/zed/issues/10893 - Closes https://github.com/zed-industries/zed/issues/12833 - Closes https://github.com/zed-extensions/svelte/issues/1 - Closes https://github.com/zed-industries/zed/issues/14943 - Closes https://github.com/zed-extensions/svelte/issues/2 - Added: buffer/file symbol outlines for `.svelte` (`outlines.scm`) - Improved: Attribute directives & modifiers in `.svelte` files can be styled independently. - Fixed: issue where svelte expression inside quotes failed parsing - Improved: Svelte components in Markup are styled differently from tags. 
- Added: Support for Svelte 5 syntax (`{#snippet children()}`, `{@render foo()`) - Change: Svelte now using [tree-sitter-grammars/tree-sitter-svelte](https://github.com/tree-sitter-grammars/tree-sitter-svelte) for language highlighting - Added: Support for typescript syntax in svelte expressions ![image](https://github.com/user-attachments/assets/49d199ee-7550-49a7-912d-070cf691b029) ![image](https://github.com/user-attachments/assets/848ac5b6-62da-4c42-8e24-b7023504f8af) Release Notes: - N/A --- **tree-sitter-grammar things to improve** - [ ] snippet functions aren't being treated as JS code - [ ] we should be able to detect @component comments and treat them as markdown - [x] `foo:bar` style/class/prop directives - [x] `--foo="..."` var fields - [ ] snippet/if blocks's children may need to be indented a little further Will implement some of the rest of these in a separate PR --------- Co-authored-by: Marshall Bowers --- docs/src/languages/svelte.md | 23 ++- extensions/svelte/extension.toml | 4 +- .../svelte/languages/svelte/brackets.scm | 7 + .../svelte/languages/svelte/config.toml | 12 +- .../svelte/languages/svelte/highlights.scm | 121 +++++++++++---- .../svelte/languages/svelte/injections.scm | 142 ++++++++++-------- .../svelte/languages/svelte/outline.scm | 69 +++++++++ 7 files changed, 268 insertions(+), 110 deletions(-) create mode 100644 extensions/svelte/languages/svelte/brackets.scm create mode 100644 extensions/svelte/languages/svelte/outline.scm diff --git a/docs/src/languages/svelte.md b/docs/src/languages/svelte.md index 1c6fd49b3e25ff..157a57d43e1b8f 100644 --- a/docs/src/languages/svelte.md +++ b/docs/src/languages/svelte.md @@ -2,16 +2,29 @@ Svelte support is available through the [Svelte extension](https://github.com/zed-industries/zed/tree/main/extensions/svelte). 
-- Tree Sitter: [Himujjal/tree-sitter-svelte](https://github.com/Himujjal/tree-sitter-svelte) +- Tree Sitter: [tree-sitter-grammars/tree-sitter-svelte](https://github.com/tree-sitter-grammars/tree-sitter-svelte) - Language Server: [sveltejs/language-tools](https://github.com/sveltejs/language-tools) - +## Extra theme styling configuration + +You can modify how certain styles such as directives and modifiers appear in attributes: + +```json +"syntax": { + // Styling for directives (e.g., `class:foo` or `on:click`) (the `on` or `class` part of the attribute). + "attribute.function": { + "color": "#ff0000" + }, + // Styling for modifiers at the end of attributes, e.g. `on:` + "attribute.special": { + "color": "#00ff00" + } +} +``` ## Inlay Hints -Zed sets the following initialization options for inlay Hints: +Zed sets the following initialization options for inlay hints: ```json "inlayHints": { diff --git a/extensions/svelte/extension.toml b/extensions/svelte/extension.toml index 9ca1d6c5daafdb..694fdec2a67c56 100644 --- a/extensions/svelte/extension.toml +++ b/extensions/svelte/extension.toml @@ -11,5 +11,5 @@ name = "Svelte Language Server" language = "Svelte" [grammars.svelte] -repository = "https://github.com/Himujjal/tree-sitter-svelte" -commit = "b08d070e303d2a385d6d0ab3add500f8fa514443" +repository = "https://github.com/tree-sitter-grammars/tree-sitter-svelte" +commit = "3f06f705410683adb17d146b5eca28c62fe81ba6" diff --git a/extensions/svelte/languages/svelte/brackets.scm b/extensions/svelte/languages/svelte/brackets.scm new file mode 100644 index 00000000000000..deb34f80a806ff --- /dev/null +++ b/extensions/svelte/languages/svelte/brackets.scm @@ -0,0 +1,7 @@ +("<" @open ">" @close) +("{" @open "}" @close) +("'" @open "'" @close) +("\"" @open "\"" @close) +("(" @open ")" @close) +; ("[" @open "]" @close) +; ("`" @open "`" @close) diff --git a/extensions/svelte/languages/svelte/config.toml b/extensions/svelte/languages/svelte/config.toml index 
3bab2f29430813..4db968be8c8f64 100644 --- a/extensions/svelte/languages/svelte/config.toml +++ b/extensions/svelte/languages/svelte/config.toml @@ -2,16 +2,16 @@ name = "Svelte" grammar = "svelte" path_suffixes = ["svelte"] block_comment = [""] -autoclose_before = ";:.,=}])>" +autoclose_before = ":\"'}]>" brackets = [ { start = "{", end = "}", close = true, newline = true }, + { start = "<", end = ">", close = true, newline = true, not_in = ["string"] }, { start = "[", end = "]", close = true, newline = true }, { start = "(", end = ")", close = true, newline = true }, - { start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, - { start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] }, - { start = "`", end = "`", close = true, newline = false, not_in = ["string"] }, - { start = "/*", end = " */", close = true, newline = false, not_in = ["string", "comment"] }, + { start = "!--", end = " --", close = true, newline = true }, + { start = "\"", end = "\"", close = true, newline = true, not_in = ["string"] }, + { start = "'", end = "'", close = true, newline = true, not_in = ["string"] }, + { start = "`", end = "`", close = true, newline = true, not_in = ["string"] }, ] scope_opt_in_language_servers = ["tailwindcss-language-server"] prettier_parser_name = "svelte" diff --git a/extensions/svelte/languages/svelte/highlights.scm b/extensions/svelte/languages/svelte/highlights.scm index 4e317489d832fa..01d8bb8db6531b 100755 --- a/extensions/svelte/languages/svelte/highlights.scm +++ b/extensions/svelte/languages/svelte/highlights.scm @@ -1,50 +1,107 @@ -; Special identifiers -;-------------------- -; Treat capitalized tag names as constructors and types -((tag_name) @type - (#match? @type "^[A-Z]")) +; comments +(comment) @comment -; Regular (lowercase) tag names -((tag_name) @tag - (#match? 
@tag "^[a-z]")) +; property attribute +(attribute_directive) @attribute.function +(attribute_identifier) @attribute +(attribute_modifier) @attribute.special -; TODO: -(attribute_name) @property -(erroneous_end_tag_name) @keyword -(comment) @comment +; Style component attributes as @property +(start_tag + ( + (tag_name) @_tag_name + (#match? @_tag_name "^[A-Z]") + ) + (attribute + (attribute_name + (attribute_identifier) @tag.property + ) + ) +) -[ - (attribute_value) - (quoted_attribute_value) -] @string +(self_closing_tag + ( + (tag_name) @_tag_name + (#match? @_tag_name "^[A-Z]") + ) + (attribute + (attribute_name + (attribute_identifier) @tag.property + ) + ) +) -[ - (text) - (raw_text_expr) - (raw_text_each) -] @none + +; style elements starting with lowercase letters as tags +( + (tag_name) @tag + (#match? @tag "^[a-z]") +) + +; style elements starting with uppercase letters as components (types) +; Also valid might be to treat them as constructors +( + (tag_name) @tag @tag.component.type.constructor + (#match? @tag "^[A-Z]") +) [ - (special_block_keyword) - (then) - (as) -] @keyword + "<" + ">" + "" +] @tag.punctuation.bracket + [ "{" "}" ] @punctuation.bracket -"=" @operator +[ + "|" +] @punctuation.delimiter + [ - "<" - ">" - "" + "@" "#" ":" "/" - "@" -] @tag.delimiter +] @tag.punctuation.special + +"=" @operator + + +; Treating (if, each, ...) as a keyword inside of blocks +; like {#if ...} or {#each ...} +(block_start_tag + tag: _ @tag.keyword +) + +(block_tag + tag: _ @tag.keyword +) + +(block_end_tag + tag: _ @tag.keyword +) + +(expression_tag + tag: _ @tag.keyword +) + +; Style quoted string attribute values +(quoted_attribute_value) @string + + +; Highlight the `as` keyword in each blocks +(each_start + ("as") @tag.keyword +) + + +; Highlight the snippet name as a function +; (e.g. 
{#snippet foo(bar)} +(snippet_name) @function diff --git a/extensions/svelte/languages/svelte/injections.scm b/extensions/svelte/languages/svelte/injections.scm index 24f9425803e2d4..73d2b9abb1bb6d 100755 --- a/extensions/svelte/languages/svelte/injections.scm +++ b/extensions/svelte/languages/svelte/injections.scm @@ -1,74 +1,86 @@ -; injections.scm -; -------------- +; ; injections.scm +; ; -------------- -; match script tags without a lang tag -((script_element - (start_tag - (attribute - (attribute_name) @_name)*) - (raw_text) @content) - (#not-eq? @_name "lang") - (#set! "language" "javascript")) +; Match script tags with a lang attribute +(script_element + (start_tag + (attribute + (attribute_name) @_attr_name + (#eq? @_attr_name "lang") + (quoted_attribute_value + (attribute_value) @language + ) + ) + ) + (raw_text) @content +) -; match javascript -((script_element - (start_tag - (attribute - (attribute_name) @_name - (quoted_attribute_value (attribute_value) @_value))) - (raw_text) @content) - (#eq? @_name "lang") - (#eq? @_value "js") - (#set! "language" "javascript")) +; Match script tags without a lang attribute +(script_element + (start_tag + (attribute + (attribute_name) @_attr_name + )* + ) + (raw_text) @content + (#not-any-of? @_attr_name "lang") + (#set! language "javascript") +) -; match typescript -((script_element - (start_tag - (attribute - (attribute_name) @_name - (quoted_attribute_value (attribute_value) @_value))) - (raw_text) @content) - (#eq? @_name "lang") - (#eq? @_value "ts") - (#set! "language" "typescript")) +; Match the contents of the script's generics="T extends string" as typescript code +; +; Disabled for the time-being because tree-sitter is treating the generics +; attribute as a top-level typescript statement, where `T extends string` is +; not a valid top-level typescript statement. +; +; (script_element +; (start_tag +; (attribute +; (attribute_name) @_attr_name +; (#eq? 
@_attr_name "generics") +; (quoted_attribute_value +; (attribute_value) @content +; ) +; ) +; ) +; (#set! language "typescript") +; ) -(style_element - (raw_text) @content - (#set! "language" "css")) -; match style tags without a lang tag -((style_element - (start_tag - (attribute - (attribute_name) @_name)*) - (raw_text) @content) - (#not-eq? @_name "lang") - (#set! "language" "css")) +; Mark everything as typescript because it's +; a more generic superset of javascript +; Not sure if it's possible to somehow refer to the +; script's language attribute here. +((svelte_raw_text) @content + (#set! "language" "ts") +) -; match css -((style_element - (start_tag - (attribute - (attribute_name) @_name - (quoted_attribute_value (attribute_value) @_value))) - (raw_text) @content) - (#eq? @_name "lang") - (#eq? @_value "css") - (#set! "language" "css")) +; Match style tags with a lang attribute +(style_element + (start_tag + (attribute + (attribute_name) @_attr_name + (#eq? @_attr_name "lang") + (quoted_attribute_value + (attribute_value) @language + ) + ) + ) + (raw_text) @content +) -; match scss -((style_element - (start_tag - (attribute - (attribute_name) @_name - (quoted_attribute_value (attribute_value) @_value))) - (raw_text) @content) - (#eq? @_name "lang") - (#eq? @_value "scss") - (#set! "language" "scss")) +; Match style tags without a lang attribute +(style_element + (start_tag + (attribute + (attribute_name) @_attr_name + )* + ) + (raw_text) @content + (#not-any-of? @_attr_name "lang") + (#set! language "css") +) -((raw_text_expr) @content - (#set! "language" "javascript")) -((raw_text_each) @content - (#set! 
"language" "javascript")) +; Downstream TODO: Style highlighting for `style:background="red"` and `style="background: red"` strings +; Downstream TODO: Style component comments as markdown diff --git a/extensions/svelte/languages/svelte/outline.scm b/extensions/svelte/languages/svelte/outline.scm new file mode 100644 index 00000000000000..8242ada2433de2 --- /dev/null +++ b/extensions/svelte/languages/svelte/outline.scm @@ -0,0 +1,69 @@ + +(script_element + (start_tag) @name + (raw_text) @context @item +) + +(script_element + (end_tag) @name @item +) + +(style_element + (start_tag) @name + (raw_text) @context +) @item + + +(document) @item + +(comment) @annotation + +(if_statement + (if_start) @name +) @item + +(else_block + (else_start) @name +) @item + +(else_if_block + (else_if_start) @name +) @item + +(element + (start_tag) @name +) @item + +(element + (self_closing_tag) @name +) @item + + +; (if_end) @name @item + +(each_statement + (each_start) @name +) @item + + +(snippet_statement + (snippet_start) @name +) @item + +(snippet_end) @name @item + +(html_tag) @name @item + +(const_tag) @name @item + +(await_statement + (await_start) @name +) @item + +(then_block + (then_start) @name +) @item + +(catch_block + (catch_start) @name +) @item From 76603a5fc6b27e7ac86b908429134036f1c49984 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 12:13:06 -0400 Subject: [PATCH 141/762] ocaml: Bump to v0.1.0 (#17945) This PR bumps the OCaml extension to v0.1.0. 
Changes: - https://github.com/zed-industries/zed/pull/16955 - https://github.com/zed-industries/zed/pull/17886 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/ocaml/Cargo.toml | 2 +- extensions/ocaml/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6d0f7f54a01770..6d6d8e85e6731a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14481,7 +14481,7 @@ dependencies = [ [[package]] name = "zed_ocaml" -version = "0.0.2" +version = "0.1.0" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/ocaml/Cargo.toml b/extensions/ocaml/Cargo.toml index 6e07327036e82c..6df98bec4cfb83 100644 --- a/extensions/ocaml/Cargo.toml +++ b/extensions/ocaml/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_ocaml" -version = "0.0.2" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/ocaml/extension.toml b/extensions/ocaml/extension.toml index 0523ba83b6b8e8..bff7c380b5523d 100644 --- a/extensions/ocaml/extension.toml +++ b/extensions/ocaml/extension.toml @@ -1,7 +1,7 @@ id = "ocaml" name = "OCaml" description = "OCaml support." 
-version = "0.0.2" +version = "0.1.0" schema_version = 1 authors = ["Rashid Almheiri <69181766+huwaireb@users.noreply.github.com>"] repository = "https://github.com/zed-industries/zed" From 3d69942f7141c689a8f607aa11f534303560862b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 17 Sep 2024 12:34:36 -0400 Subject: [PATCH 142/762] Use dev icons for dev bundles (#17944) Follow-up of https://github.com/zed-industries/zed/pull/17486/ actually using the dev icons for dev bundles Release Notes: - N/A --- crates/zed/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index ad02d4f38879ef..28d2c7f82576d8 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -134,7 +134,7 @@ tree-sitter-rust.workspace = true workspace = { workspace = true, features = ["test-support"] } [package.metadata.bundle-dev] -icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"] +icon = ["resources/app-icon-dev@2x.png", "resources/app-icon-dev.png"] identifier = "dev.zed.Zed-Dev" name = "Zed Dev" osx_minimum_system_version = "10.15.7" From 0070635b4de672af2510df73d8bbd2f1c8f92cce Mon Sep 17 00:00:00 2001 From: Albert Marashi Date: Wed, 18 Sep 2024 02:21:31 +0930 Subject: [PATCH 143/762] Styling option for separating terminal view background from background color (#17611) Closes #17313 Release Notes: - Added theme styling option to separate terminal background view from terminal background color, for transparent terminal backgrounds --- crates/repl/src/outputs/plain.rs | 2 +- crates/terminal_view/src/terminal_element.rs | 6 +++--- crates/theme/src/default_colors.rs | 2 ++ crates/theme/src/one_themes.rs | 1 + crates/theme/src/schema.rs | 8 ++++++++ crates/theme/src/styles/colors.rs | 5 ++++- 6 files changed, 19 insertions(+), 5 deletions(-) diff --git a/crates/repl/src/outputs/plain.rs b/crates/repl/src/outputs/plain.rs index 5b9960d7b8917d..4bf007b292fec8 100644 --- 
a/crates/repl/src/outputs/plain.rs +++ b/crates/repl/src/outputs/plain.rs @@ -75,7 +75,7 @@ pub fn text_style(cx: &mut WindowContext) -> TextStyle { font_size: theme::get_buffer_font_size(cx).into(), font_style: FontStyle::Normal, line_height: cx.line_height().into(), - background_color: Some(theme.colors().terminal_background), + background_color: Some(theme.colors().terminal_ansi_background), white_space: WhiteSpace::Normal, truncate: None, // These are going to be overridden per-cell diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index b3d2b40e742a1e..ce4be0e6797078 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -662,7 +662,7 @@ impl Element for TerminalElement { font_size: font_size.into(), font_style: FontStyle::Normal, line_height: line_height.into(), - background_color: Some(theme.colors().terminal_background), + background_color: Some(theme.colors().terminal_ansi_background), white_space: WhiteSpace::Normal, truncate: None, // These are going to be overridden per-cell @@ -778,7 +778,7 @@ impl Element for TerminalElement { &[TextRun { len, font: text_style.font(), - color: theme.colors().terminal_background, + color: theme.colors().terminal_ansi_background, background_color: None, underline: Default::default(), strikethrough: None, @@ -1158,7 +1158,7 @@ pub fn convert_color(fg: &terminal::alacritty_terminal::vte::ansi::Color, theme: NamedColor::BrightCyan => colors.terminal_ansi_bright_cyan, NamedColor::BrightWhite => colors.terminal_ansi_bright_white, NamedColor::Foreground => colors.terminal_foreground, - NamedColor::Background => colors.terminal_background, + NamedColor::Background => colors.terminal_ansi_background, NamedColor::Cursor => theme.players().local().cursor, NamedColor::DimBlack => colors.terminal_ansi_dim_black, NamedColor::DimRed => colors.terminal_ansi_dim_red, diff --git a/crates/theme/src/default_colors.rs 
b/crates/theme/src/default_colors.rs index 48e67a4cc0433e..4def0bb8d74d62 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -84,6 +84,7 @@ impl ThemeColors { terminal_foreground: black().light().step_12(), terminal_bright_foreground: black().light().step_11(), terminal_dim_foreground: black().light().step_10(), + terminal_ansi_background: neutral().light().step_1(), terminal_ansi_bright_black: black().light().step_11(), terminal_ansi_bright_red: red().light().step_10(), terminal_ansi_bright_green: green().light().step_10(), @@ -179,6 +180,7 @@ impl ThemeColors { editor_document_highlight_read_background: neutral().dark_alpha().step_4(), editor_document_highlight_write_background: neutral().dark_alpha().step_4(), terminal_background: neutral().dark().step_1(), + terminal_ansi_background: neutral().dark().step_1(), terminal_foreground: white().dark().step_12(), terminal_bright_foreground: white().dark().step_11(), terminal_dim_foreground: white().dark().step_10(), diff --git a/crates/theme/src/one_themes.rs b/crates/theme/src/one_themes.rs index 241344b8f318b6..69e69ce23dc8d1 100644 --- a/crates/theme/src/one_themes.rs +++ b/crates/theme/src/one_themes.rs @@ -105,6 +105,7 @@ pub(crate) fn one_dark() -> Theme { terminal_background: bg, // todo("Use one colors for terminal") + terminal_ansi_background: crate::black().dark().step_12(), terminal_foreground: crate::white().dark().step_12(), terminal_bright_foreground: crate::white().dark().step_11(), terminal_dim_foreground: crate::white().dark().step_10(), diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index a47dc23a7c2b69..cc62e2976ba364 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -421,6 +421,10 @@ pub struct ThemeColorsContent { #[serde(rename = "terminal.foreground")] pub terminal_foreground: Option, + /// Terminal ansi background color. 
+ #[serde(rename = "terminal.ansi.background")] + pub terminal_ansi_background: Option, + /// Bright terminal foreground color. #[serde(rename = "terminal.bright_foreground")] pub terminal_bright_foreground: Option, @@ -792,6 +796,10 @@ impl ThemeColorsContent { .terminal_background .as_ref() .and_then(|color| try_parse_color(color).ok()), + terminal_ansi_background: self + .terminal_ansi_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()), terminal_foreground: self .terminal_foreground .as_ref() diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 349a79ba662794..04aba89a0d5e73 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -175,7 +175,7 @@ pub struct ThemeColors { // === // Terminal // === - /// Terminal background color. + /// Terminal layout background color. pub terminal_background: Hsla, /// Terminal foreground color. pub terminal_foreground: Hsla, @@ -184,6 +184,9 @@ pub struct ThemeColors { /// Dim terminal foreground color. pub terminal_dim_foreground: Hsla, + /// Terminal ansi background color. + pub terminal_ansi_background: Hsla, + /// Black ANSI terminal color. pub terminal_ansi_black: Hsla, /// Bright black ANSI terminal color. From ac5c35b3df32a0a68e74ed6dbf288bcc7055e050 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 13:26:34 -0400 Subject: [PATCH 144/762] theme: Fix casing of "ANSI" in doc comments (#17952) This PR fixes the casing of "ANSI" in some doc comments after #17611. 
Release Notes: - N/A --- crates/theme/src/schema.rs | 2 +- crates/theme/src/styles/colors.rs | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index cc62e2976ba364..0229b1ea98d590 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -421,7 +421,7 @@ pub struct ThemeColorsContent { #[serde(rename = "terminal.foreground")] pub terminal_foreground: Option, - /// Terminal ansi background color. + /// Terminal ANSI background color. #[serde(rename = "terminal.ansi.background")] pub terminal_ansi_background: Option, diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 04aba89a0d5e73..0b37be09923c79 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -183,10 +183,8 @@ pub struct ThemeColors { pub terminal_bright_foreground: Hsla, /// Dim terminal foreground color. pub terminal_dim_foreground: Hsla, - - /// Terminal ansi background color. + /// Terminal ANSI background color. pub terminal_ansi_background: Hsla, - /// Black ANSI terminal color. pub terminal_ansi_black: Hsla, /// Bright black ANSI terminal color. From ee8668ef45378bda7ca7b1f543310c8fcf226725 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 13:33:09 -0400 Subject: [PATCH 145/762] Bind `editor::Rewrap` to `alt-q` (#17953) This PR adds a keybinding for the `editor: rewrap` command. It is bound to `alt-q`, by default. In Vim mode, it is bound to `g q`. 
Release Notes: - N/A --- assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 1 + assets/keymaps/vim.json | 1 + 3 files changed, 3 insertions(+) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index bb5673dde6c178..02fc6d8e04da8a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -56,6 +56,7 @@ "shift-tab": "editor::TabPrev", "ctrl-k": "editor::CutToEndOfLine", // "ctrl-t": "editor::Transpose", + "alt-q": "editor::Rewrap", "ctrl-backspace": "editor::DeleteToPreviousWordStart", "ctrl-delete": "editor::DeleteToNextWordEnd", "shift-delete": "editor::Cut", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 9a0c08c3dcaddc..33536cc9ff9e4b 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -51,6 +51,7 @@ "shift-tab": "editor::TabPrev", "ctrl-k": "editor::CutToEndOfLine", "ctrl-t": "editor::Transpose", + "alt-q": "editor::Rewrap", "cmd-backspace": "editor::DeleteToBeginningOfLine", "cmd-delete": "editor::DeleteToEndOfLine", "alt-backspace": "editor::DeleteToPreviousWordStart", diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 54905b22678cd1..18b38384ef81ce 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -124,6 +124,7 @@ "g i": "vim::InsertAtPrevious", "g ,": "vim::ChangeListNewer", "g ;": "vim::ChangeListOlder", + "g q": "editor::Rewrap", "shift-h": "vim::WindowTop", "shift-m": "vim::WindowMiddle", "shift-l": "vim::WindowBottom", From ccfd4b1887e7dc30e839334d86713b671449c55b Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 17 Sep 2024 19:45:29 +0200 Subject: [PATCH 146/762] rust: Test rust-analyzer binary after finding in PATH (#17951) Release Notes: - N/A --------- Co-authored-by: Conrad Irwin --- crates/language/src/language.rs | 1 + crates/languages/src/rust.rs | 23 ++++++++++++++++++++++- crates/project/src/lsp_store.rs | 24 ++++++++++++++++++++++++ 3 
files changed, 47 insertions(+), 1 deletion(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 8cd8c8079d7f0a..7901a49d0021f8 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -285,6 +285,7 @@ pub trait LspAdapterDelegate: Send + Sync { async fn which(&self, command: &OsStr) -> Option; async fn shell_env(&self) -> HashMap; async fn read_text_file(&self, path: PathBuf) -> Result; + async fn try_exec(&self, binary: LanguageServerBinary) -> Result<()>; } #[async_trait(?Send)] diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 456ea8e4492eda..5055bb69c1e6f5 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -61,7 +61,28 @@ impl LspAdapter for RustLspAdapter { }) => { let path = delegate.which(Self::SERVER_NAME.as_ref()).await; let env = delegate.shell_env().await; - (path, Some(env), None) + + if let Some(path) = path { + // It is surprisingly common for ~/.cargo/bin/rust-analyzer to be a symlink to + // /usr/bin/rust-analyzer that fails when you run it; so we need to test it. + log::info!("found rust-analyzer in PATH. trying to run `rust-analyzer --help`"); + match delegate + .try_exec(LanguageServerBinary { + path: path.clone(), + arguments: vec!["--help".into()], + env: Some(env.clone()), + }) + .await + { + Ok(()) => (Some(path), Some(env), None), + Err(err) => { + log::error!("failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {:?}", path, err); + (None, None, None) + } + } + } else { + (None, None, None) + } } // Otherwise, we use the configured binary. 
Some(BinarySettings { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index e1e6001d24b96b..2c718a42abbb84 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7133,6 +7133,30 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { which::which(command).ok() } + async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { + if self.fs.is_none() { + return Ok(()); + } + + let working_dir = self.worktree_root_path(); + let output = smol::process::Command::new(&command.path) + .args(command.arguments) + .envs(command.env.clone().unwrap_or_default()) + .current_dir(working_dir) + .output() + .await?; + + if output.status.success() { + return Ok(()); + } + Err(anyhow!( + "{}, stdout: {:?}, stderr: {:?}", + output.status, + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + )) + } + fn update_status( &self, server_name: LanguageServerName, From 5cdca6d8dd8c2179a85293376e8db324c34b6945 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 13:49:52 -0400 Subject: [PATCH 147/762] multi_buffer: Fix a panic when expanding an excerpt with the cursor at the end (#17955) This PR fixes a panic when expanding an excerpt within a multibuffer that could occur when the cursor was at the end of the buffer. You can reproduce this by opening a multibuffer, putting your cursor at the very end of that buffer, and then expanding the excerpt (Shift + Enter). Release Notes: - Fixed a panic that could occur when expanding an excerpt within a multibuffer when the cursor was at the end of the excerpt. 
Co-authored-by: Antonio --- crates/multi_buffer/src/multi_buffer.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index f7172ccf08d297..5b6eddd5b16bf1 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -3790,7 +3790,7 @@ impl MultiBufferSnapshot { } } - // Returns the locators referenced by the given excerpt ids, sorted by locator. + /// Returns the locators referenced by the given excerpt IDs, sorted by locator. fn excerpt_locators_for_ids( &self, ids: impl IntoIterator, @@ -3801,13 +3801,17 @@ impl MultiBufferSnapshot { while sorted_ids.last() == Some(&ExcerptId::max()) { sorted_ids.pop(); - locators.push(Locator::max()); + if let Some(mapping) = self.excerpt_ids.last() { + locators.push(mapping.locator.clone()); + } } let mut sorted_ids = sorted_ids.into_iter().dedup().peekable(); if sorted_ids.peek() == Some(&ExcerptId::min()) { sorted_ids.next(); - locators.push(Locator::min()); + if let Some(mapping) = self.excerpt_ids.first() { + locators.push(mapping.locator.clone()); + } } let mut cursor = self.excerpt_ids.cursor::(); From 345efa4e36dd40a3eaf50ab5d024a06ed4b1f7b9 Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Wed, 18 Sep 2024 01:50:36 +0800 Subject: [PATCH 148/762] gpui: Fix img element to render correct SVG color (#15488) Release Notes: - N/A It should convert RGBA to BGRA. > I added an example color svg, that was I make based on [Lucide grip icon](https://lucide.dev/icons/grip). 
## Before image ## After image Co-authored-by: Marshall Bowers --- crates/gpui/examples/image/color.svg | 13 +++++++++++++ crates/gpui/examples/image/image.rs | 2 +- crates/gpui/src/elements/img.rs | 7 ++++++- 3 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 crates/gpui/examples/image/color.svg diff --git a/crates/gpui/examples/image/color.svg b/crates/gpui/examples/image/color.svg new file mode 100644 index 00000000000000..84e9809d09492a --- /dev/null +++ b/crates/gpui/examples/image/color.svg @@ -0,0 +1,13 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index cc8e1a686c8a06..ac7af186d30ba0 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -131,7 +131,7 @@ fn main() { PathBuf::from_str("crates/gpui/examples/image/app-icon.png").unwrap(), ), remote_resource: "https://picsum.photos/512/512".into(), - asset_resource: "image/app-icon.png".into(), + asset_resource: "image/color.svg".into(), }) }) .unwrap(); diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 07f5acc95b1bc9..f1e8bb68e3237e 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -408,9 +408,14 @@ impl Asset for ImageAsset { // TODO: Can we make svgs always rescale? svg_renderer.render_pixmap(&bytes, SvgSize::ScaleFactor(1.0))?; - let buffer = + let mut buffer = ImageBuffer::from_raw(pixmap.width(), pixmap.height(), pixmap.take()).unwrap(); + // Convert from RGBA to BGRA. 
+ for pixel in buffer.chunks_exact_mut(4) { + pixel.swap(0, 2); + } + RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1)) }; From 7246a0f39ce1f529d4e041c3316491e96152f1fe Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 13:51:11 -0400 Subject: [PATCH 149/762] macos: Use ~/Library/Caches/Zed instead of ~/.cache/zed (#17949) --- crates/paths/src/paths.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index b80bef5f2d623e..7f662d0325d1bf 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -59,6 +59,12 @@ pub fn support_dir() -> &'static PathBuf { pub fn temp_dir() -> &'static PathBuf { static TEMP_DIR: OnceLock = OnceLock::new(); TEMP_DIR.get_or_init(|| { + if cfg!(target_os = "macos") { + return dirs::cache_dir() + .expect("failed to determine cachesDirectory directory") + .join("Zed"); + } + if cfg!(target_os = "windows") { return dirs::cache_dir() .expect("failed to determine LocalAppData directory") From e6c4076ef0e59d4cbbdbbecb22ac57c23e6e69c2 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 17 Sep 2024 14:07:50 -0400 Subject: [PATCH 150/762] Add cmake to dev build instructions (#17943) Release Notes: - N/A --- docs/src/development/macos.md | 6 ++++++ docs/src/development/windows.md | 1 + script/linux | 6 ++++++ 3 files changed, 13 insertions(+) diff --git a/docs/src/development/macos.md b/docs/src/development/macos.md index 160ae3dd0e4821..1407b0f610dc68 100644 --- a/docs/src/development/macos.md +++ b/docs/src/development/macos.md @@ -29,6 +29,12 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). 
rustup target add wasm32-wasip1 ``` +- Install `cmake` (required by [a dependency](https://docs.rs/wasmtime-c-api-impl/latest/wasmtime_c_api/)) + + ```sh + brew install cmake + ``` + ## Backend Dependencies If you are developing collaborative features of Zed, you'll need to install the dependencies of zed's `collab` server: diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index 600c030931f27f..86eb21965e550e 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -22,6 +22,7 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). - Install [Visual Studio](https://visualstudio.microsoft.com/downloads/) with the optional component `MSVC v*** - VS YYYY C++ x64/x86 build tools` (`v***` is your VS version and `YYYY` is year when your VS was released) - Install Windows 11 or 10 SDK depending on your system, but ensure that at least `Windows 10 SDK version 2104 (10.0.20348.0)` is installed on your machine. You can download it from the [Windows SDK Archive](https://developer.microsoft.com/windows/downloads/windows-sdk/) +- Install [CMake](https://cmake.org/download) ## Backend dependencies diff --git a/script/linux b/script/linux index 051748ad4556e9..d894d33ea80cd7 100755 --- a/script/linux +++ b/script/linux @@ -25,6 +25,7 @@ if [[ -n $apt ]]; then libvulkan1 libgit2-dev make + cmake clang mold jq @@ -44,6 +45,7 @@ if [[ -n $dnf ]]; then gcc g++ clang + cmake mold alsa-lib-devel fontconfig-devel @@ -78,6 +80,7 @@ if [[ -n $zyp ]]; then gcc-c++ clang make + cmake alsa-devel fontconfig-devel wayland-devel @@ -99,6 +102,7 @@ if [[ -n $pacman ]]; then deps=( gcc clang + cmake alsa-lib fontconfig wayland @@ -122,6 +126,7 @@ if [[ -n $xbps ]]; then deps=( gettext-devel clang + cmake jq elfutils-devel gcc @@ -148,6 +153,7 @@ if [[ -n $emerge ]]; then app-arch/zstd dev-libs/openssl dev-libs/wayland + dev-util/cmake media-libs/alsa-lib media-libs/fontconfig media-libs/vulkan-loader From 
fc43b21e787adb365c09abc68bc3c602b842e74f Mon Sep 17 00:00:00 2001 From: ClanEver <562211524@qq.com> Date: Wed, 18 Sep 2024 02:10:37 +0800 Subject: [PATCH 151/762] ruff: Fix wrong Ruff path on Windows (#17883) Log: 2024-09-16T22:32:04.7715712+08:00 [ERROR] failed to start language server "ruff": failed to spawn command. path: "...\\AppData\\Local\\Zed\\extensions\\work\\ruff\\ruff-0.6.5\\ruff-x86_64-pc-windows-msvc\\ruff" The right path: `...\\AppData\\Local\\Zed\\extensions\\work\\ruff\\ruff-0.6.5\\ruff.exe` Release Notes: - N/A --- extensions/ruff/src/ruff.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/extensions/ruff/src/ruff.rs b/extensions/ruff/src/ruff.rs index c7c51ec7b9dd68..048c5893ca773d 100644 --- a/extensions/ruff/src/ruff.rs +++ b/extensions/ruff/src/ruff.rs @@ -89,7 +89,10 @@ impl RuffExtension { .ok_or_else(|| format!("no asset found matching {:?}", asset_name))?; let version_dir = format!("ruff-{}", release.version); - let binary_path = format!("{version_dir}/{asset_stem}/ruff"); + let binary_path = match platform { + zed::Os::Windows => format!("{version_dir}/ruff.exe"), + _ => format!("{version_dir}/{asset_stem}/ruff"), + }; if !fs::metadata(&binary_path).map_or(false, |stat| stat.is_file()) { zed::set_language_server_installation_status( From ab7a7d3480c9bc553c320911867236a8554ae0ef Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 14:12:11 -0400 Subject: [PATCH 152/762] docs: Mention how to open the Prompt Library (#17957) --- docs/src/assistant/prompting.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/assistant/prompting.md b/docs/src/assistant/prompting.md index f6f68880ba28ff..0dca671b47b79a 100644 --- a/docs/src/assistant/prompting.md +++ b/docs/src/assistant/prompting.md @@ -52,6 +52,8 @@ More on prompt engineering: Zed allows you to customize the default prompt used in new context editors. 
Or to be more precise, it uses a series of prompts that are combined to form the default prompt. +To edit prompts, select "Prompt Library" from the menu icon (three horizontal lines) in the upper right hand corner or using the `cmd-k l` keyboard shortcut. + A default prompt might look something like: ```plaintext From 869a72bb3f8b3786c92ec89fecc2c0f7cf96baf0 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 14:21:06 -0400 Subject: [PATCH 153/762] ruff: Bump to v0.1.0 (#17960) This PR bumps the Ruff extension to v0.1.0. Changes: - https://github.com/zed-industries/zed/pull/15852 - https://github.com/zed-industries/zed/pull/16955 - https://github.com/zed-industries/zed/pull/17883 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/ruff/Cargo.toml | 2 +- extensions/ruff/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6d6d8e85e6731a..6eeec558ec9381 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14516,7 +14516,7 @@ dependencies = [ [[package]] name = "zed_ruff" -version = "0.0.2" +version = "0.1.0" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/ruff/Cargo.toml b/extensions/ruff/Cargo.toml index 50e0ae3908e0e6..b6c31ebbc86100 100644 --- a/extensions/ruff/Cargo.toml +++ b/extensions/ruff/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_ruff" -version = "0.0.2" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/ruff/extension.toml b/extensions/ruff/extension.toml index d622b37c685e05..63929fc1911971 100644 --- a/extensions/ruff/extension.toml +++ b/extensions/ruff/extension.toml @@ -1,7 +1,7 @@ id = "ruff" name = "Ruff" description = "Support for Ruff, the Python linter and formatter" -version = "0.0.2" +version = "0.1.0" schema_version = 1 authors = [] repository = "https://github.com/zed-industries/zed" From 447a5d6e6e95c0bbc9fb008cb27de925a1fcf778 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 17 Sep 2024 
11:35:45 -0700 Subject: [PATCH 154/762] Fix the rendering of warning text in our docs (#17958) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit cc: @danilo-leal Before: Screenshot 2024-09-17 at 10 53 13 AM After: Screenshot 2024-09-17 at 10 53 35 AM Light mode: Screenshot 2024-09-17 at 10 54 17 AM Release Notes: - N/A --- docs/theme/css/general.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index e6905b47bf485b..6f086a1052447b 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -268,7 +268,7 @@ blockquote .warning:before { .warning { margin: auto; padding: 1rem 1.25rem; - color: #000; + color: var(--fg); background-color: var(--warning-bg); border: 1px solid var(--warning-border); } From c28b22d1cfa3326a0336c69236baf63670473398 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 15:08:14 -0400 Subject: [PATCH 155/762] Update typos-cli to v1.24.6. Add scripts/check-spelling. Fix typos (#17961) --- .github/workflows/ci.yml | 11 +---------- crates/collab/src/db/ids.rs | 2 +- crates/editor/src/inlay_hint_cache.rs | 2 +- crates/language/src/buffer.rs | 2 +- crates/language/src/buffer_tests.rs | 2 +- crates/picker/src/picker.rs | 2 +- crates/terminal/src/terminal.rs | 2 +- crates/ui/src/components/icon.rs | 2 +- crates/zed/src/reliability.rs | 2 +- script/check-spelling | 11 +++++++++++ 10 files changed, 20 insertions(+), 18 deletions(-) create mode 100755 script/check-spelling diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 12b7dbd806e144..3ee4c045805dc6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,16 +39,7 @@ jobs: run: git clean -df - name: Check spelling - run: | - if ! cargo install --list | grep "typos-cli v$TYPOS_CLI_VERSION" > /dev/null; then - echo "Installing typos-cli@$TYPOS_CLI_VERSION..." 
- cargo install "typos-cli@$TYPOS_CLI_VERSION" - else - echo "typos-cli@$TYPOS_CLI_VERSION is already installed." - fi - typos - env: - TYPOS_CLI_VERSION: "1.23.3" + run: script/check-spelling - name: Run style checks uses: ./.github/actions/check_style diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs index 82ff8a56e5dc2d..1434bc07cf6c37 100644 --- a/crates/collab/src/db/ids.rs +++ b/crates/collab/src/db/ids.rs @@ -104,7 +104,7 @@ pub enum ChannelRole { /// Admin can read/write and change permissions. #[sea_orm(string_value = "admin")] Admin, - /// Member can read/write, but not change pemissions. + /// Member can read/write, but not change permissions. #[sea_orm(string_value = "member")] #[default] Member, diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index ce7ec5a4b098c9..62c5cde9d89f57 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -337,7 +337,7 @@ impl InlayHintCache { /// If needed, queries LSP for new inlay hints, using the invalidation strategy given. /// To reduce inlay hint jumping, attempts to query a visible range of the editor(s) first, /// followed by the delayed queries of the same range above and below the visible one. - /// This way, concequent refresh invocations are less likely to trigger LSP queries for the invisible ranges. + /// This way, subsequent refresh invocations are less likely to trigger LSP queries for the invisible ranges. 
pub(super) fn spawn_hint_refresh( &mut self, reason_description: &'static str, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 59b2670dafd84e..76058ffd9bac9a 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -3025,7 +3025,7 @@ impl BufferSnapshot { let mut start = text.len(); let end = start + buffer_range.len(); - // When multiple names are captured, then the matcheable text + // When multiple names are captured, then the matchable text // includes the whitespace in between the names. if !name_ranges.is_empty() { start -= 1; diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 5162269f4f6be8..50dea8d2562b00 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -382,7 +382,7 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(text, cx)); // Spawn a task to format the buffer's whitespace. - // Pause so that the foratting task starts running. + // Pause so that the formatting task starts running. let format = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx)); smol::future::yield_now().await; diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index dc661d91fbabab..8350be2b20e07d 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -509,7 +509,7 @@ impl Picker { .on_mouse_up( MouseButton::Right, cx.listener(move |this, event: &MouseUpEvent, cx| { - // We specficially want to use the platform key here, as + // We specifically want to use the platform key here, as // ctrl will already be held down for the tab switcher. 
this.handle_click(ix, event.modifiers.platform, cx) }), diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 12a7349af07a93..8f8982f02c4701 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -1619,7 +1619,7 @@ fn task_summary(task: &TaskState, error_code: Option) -> (bool, String, Str /// the cursor's `point` is not updated to the new line and column values /// /// * ??? there could be more consequences, and any further "proper" streaming from the PTY might bug and/or panic. -/// Still, concequent `append_text_to_term` invocations are possible and display the contents correctly. +/// Still, subsequent `append_text_to_term` invocations are possible and display the contents correctly. /// /// Despite the quirks, this is the simplest approach to appending text to the terminal: its alternative, `grid_mut` manipulations, /// do not properly set the scrolling state and display odd text after appending; also those manipulations are more tedious and error-prone. diff --git a/crates/ui/src/components/icon.rs b/crates/ui/src/components/icon.rs index a71c3e98720917..323181e84156f4 100644 --- a/crates/ui/src/components/icon.rs +++ b/crates/ui/src/components/icon.rs @@ -46,7 +46,7 @@ impl RenderOnce for AnyIcon { /// The decoration for an icon. /// /// For example, this can show an indicator, an "x", -/// or a diagonal strkethrough to indicate something is disabled. +/// or a diagonal strikethrough to indicate something is disabled. 
#[derive(Debug, PartialEq, Copy, Clone, EnumIter)] pub enum IconDecoration { Strikethrough, diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 9731401b3024e0..188cf417f7c38b 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -176,7 +176,7 @@ pub fn monitor_main_thread_hangs( let background_executor = cx.background_executor(); let telemetry_settings = *client::TelemetrySettings::get_global(cx); - // Initialize SIGUSR2 handler to send a backrace to a channel. + // Initialize SIGUSR2 handler to send a backtrace to a channel. let (backtrace_tx, backtrace_rx) = mpsc::channel(); static BACKTRACE: Mutex> = Mutex::new(Vec::new()); static BACKTRACE_SENDER: OnceLock> = OnceLock::new(); diff --git a/script/check-spelling b/script/check-spelling new file mode 100755 index 00000000000000..65c674057f47f7 --- /dev/null +++ b/script/check-spelling @@ -0,0 +1,11 @@ +#!/bin/sh + +TYPOS_CLI_VERSION=1.24.6 + +if ! cargo install --list | grep "typos-cli v$TYPOS_CLI_VERSION" > /dev/null; then + echo "Installing typos-cli@$TYPOS_CLI_VERSION..." + cargo install "typos-cli@$TYPOS_CLI_VERSION" +else + echo "typos-cli@$TYPOS_CLI_VERSION is already installed." +fi +typos From 06a13c298395f543e6e2e1dfd33c7a7ebe37f59e Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 15:33:28 -0400 Subject: [PATCH 156/762] svelte: Bump to v0.2.0 (#17962) This PR bumps the Svelte extension to v0.2.0. 
Changes: - https://github.com/zed-industries/zed/pull/17529 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/svelte/Cargo.toml | 2 +- extensions/svelte/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6eeec558ec9381..3b3a370c36154e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14531,7 +14531,7 @@ dependencies = [ [[package]] name = "zed_svelte" -version = "0.1.1" +version = "0.2.0" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/svelte/Cargo.toml b/extensions/svelte/Cargo.toml index d07d517af66a77..09ad78aec2db8b 100644 --- a/extensions/svelte/Cargo.toml +++ b/extensions/svelte/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_svelte" -version = "0.1.1" +version = "0.2.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/svelte/extension.toml b/extensions/svelte/extension.toml index 694fdec2a67c56..a4ecf460a85ad4 100644 --- a/extensions/svelte/extension.toml +++ b/extensions/svelte/extension.toml @@ -1,7 +1,7 @@ id = "svelte" name = "Svelte" description = "Svelte support" -version = "0.1.1" +version = "0.2.0" schema_version = 1 authors = [] repository = "https://github.com/zed-extensions/svelte" From d3d3a093b4a757f1e82cd870a7f9e3cbee9ae933 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 17 Sep 2024 12:44:33 -0700 Subject: [PATCH 157/762] Add an `eval` binary that evaluates our semantic index against CodeSearchNet (#17375) This PR is the beginning of an evaluation framework for our AI features. Right now, we're evaluating our semantic search feature against the [CodeSearchNet](https://github.com/github/CodeSearchNet) code search dataset. This dataset is very limited (for the most part, only 1 known good search result per repo) but it has surfaced some problems with our search already. 
Release Notes: - N/A --------- Co-authored-by: Jason Co-authored-by: Jason Mancuso <7891333+jvmncs@users.noreply.github.com> Co-authored-by: Nathan Co-authored-by: Richard --- .github/workflows/ci.yml | 17 +- Cargo.lock | 27 + Cargo.toml | 1 + crates/assistant/src/assistant_panel.rs | 2 +- .../src/slash_command/file_command.rs | 9 +- .../src/slash_command/search_command.rs | 54 +- crates/evals/Cargo.toml | 37 + crates/evals/LICENSE-GPL | 1 + crates/evals/build.rs | 14 + crates/evals/src/eval.rs | 631 ++++++++++++++++++ crates/http_client/src/http_client.rs | 3 +- crates/semantic_index/src/embedding_index.rs | 89 ++- crates/semantic_index/src/project_index.rs | 17 +- crates/semantic_index/src/semantic_index.rs | 125 ++-- 14 files changed, 882 insertions(+), 145 deletions(-) create mode 100644 crates/evals/Cargo.toml create mode 120000 crates/evals/LICENSE-GPL create mode 100644 crates/evals/build.rs create mode 100644 crates/evals/src/eval.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3ee4c045805dc6..c55a3a9907f47f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -101,7 +101,7 @@ jobs: timeout-minutes: 60 name: (Linux) Run Clippy and tests runs-on: - - hosted-linux-x86-1 + - buildjet-16vcpu-ubuntu-2204 steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH @@ -111,6 +111,11 @@ jobs: with: clean: false + - name: Cache dependencies + uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - name: Install Linux dependencies run: ./script/linux @@ -264,7 +269,7 @@ jobs: timeout-minutes: 60 name: Create a Linux bundle runs-on: - - hosted-linux-x86-1 + - buildjet-16vcpu-ubuntu-2204 if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} needs: [linux_tests] env: @@ -279,9 +284,6 @@ jobs: - name: Install Linux dependencies run: ./script/linux - - name: Limit 
target directory size - run: script/clear-target-dir-if-larger-than 100 - - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | @@ -335,7 +337,7 @@ jobs: timeout-minutes: 60 name: Create arm64 Linux bundle runs-on: - - hosted-linux-arm-1 + - buildjet-16vcpu-ubuntu-2204-arm if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} needs: [linux_tests] env: @@ -350,9 +352,6 @@ jobs: - name: Install Linux dependencies run: ./script/linux - - name: Limit target directory size - run: script/clear-target-dir-if-larger-than 100 - - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | diff --git a/Cargo.lock b/Cargo.lock index 3b3a370c36154e..9cff8953932ade 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4000,6 +4000,33 @@ dependencies = [ "num-traits", ] +[[package]] +name = "evals" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "client", + "clock", + "collections", + "env_logger", + "feature_flags", + "fs", + "git", + "gpui", + "http_client", + "language", + "languages", + "node_runtime", + "open_ai", + "project", + "semantic_index", + "serde", + "serde_json", + "settings", + "smol", +] + [[package]] name = "event-listener" version = "2.5.3" diff --git a/Cargo.toml b/Cargo.toml index e1af231c7e4fee..eea510edf2e0e9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ members = [ "crates/diagnostics", "crates/docs_preprocessor", "crates/editor", + "crates/evals", "crates/extension", "crates/extension_api", "crates/extension_cli", diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 52838b5c77b190..6eaa86f4a7aff9 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -3282,7 +3282,7 @@ impl ContextEditor { let fence = codeblock_fence_for_path( filename.as_deref(), - Some(selection.start.row..selection.end.row), 
+ Some(selection.start.row..=selection.end.row), ); if let Some((line_comment_prefix, outline_text)) = diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index 0df8b5d4e0b83c..260c6b0e2a0840 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -8,7 +8,7 @@ use project::{PathMatchCandidateSet, Project}; use serde::{Deserialize, Serialize}; use std::{ fmt::Write, - ops::Range, + ops::{Range, RangeInclusive}, path::{Path, PathBuf}, sync::{atomic::AtomicBool, Arc}, }; @@ -342,7 +342,10 @@ fn collect_files( }) } -pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option>) -> String { +pub fn codeblock_fence_for_path( + path: Option<&Path>, + row_range: Option>, +) -> String { let mut text = String::new(); write!(text, "```").unwrap(); @@ -357,7 +360,7 @@ pub fn codeblock_fence_for_path(path: Option<&Path>, row_range: Option, + }, +} + +#[derive(Clone, Deserialize, Serialize)] +struct EvaluationProject { + repo: String, + sha: String, + queries: Vec, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +struct EvaluationQuery { + query: String, + expected_results: Vec, +} + +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] +struct EvaluationSearchResult { + file: String, + lines: RangeInclusive, +} + +#[derive(Clone, Deserialize, Serialize)] +struct EvaluationProjectOutcome { + repo: String, + sha: String, + queries: Vec, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +struct EvaluationQueryOutcome { + repo: String, + query: String, + expected_results: Vec, + actual_results: Vec, + covered_file_count: usize, + overlapped_result_count: usize, + covered_result_count: usize, + total_result_count: usize, + covered_result_indices: Vec, +} + +fn main() -> Result<()> { + let cli = Cli::parse(); + env_logger::init(); + + gpui::App::headless().run(move |cx| { + let executor = cx.background_executor().clone(); + + 
match cli.command { + Commands::Fetch {} => { + executor + .clone() + .spawn(async move { + if let Err(err) = fetch_evaluation_resources(&executor).await { + eprintln!("Error: {}", err); + exit(1); + } + exit(0); + }) + .detach(); + } + Commands::Run { repo } => { + cx.spawn(|mut cx| async move { + if let Err(err) = run_evaluation(repo, &executor, &mut cx).await { + eprintln!("Error: {}", err); + exit(1); + } + exit(0); + }) + .detach(); + } + } + }); + + Ok(()) +} + +async fn fetch_evaluation_resources(executor: &BackgroundExecutor) -> Result<()> { + let http_client = http_client::HttpClientWithProxy::new(None, None); + fetch_code_search_net_resources(&http_client).await?; + fetch_eval_repos(executor, &http_client).await?; + Ok(()) +} + +async fn fetch_code_search_net_resources(http_client: &dyn HttpClient) -> Result<()> { + eprintln!("Fetching CodeSearchNet evaluations..."); + + let annotations_url = "https://raw.githubusercontent.com/github/CodeSearchNet/master/resources/annotationStore.csv"; + + let dataset_dir = Path::new(CODESEARCH_NET_DIR); + fs::create_dir_all(&dataset_dir).expect("failed to create CodeSearchNet directory"); + + // Fetch the annotations CSV, which contains the human-annotated search relevances + let annotations_path = dataset_dir.join("annotations.csv"); + let annotations_csv_content = if annotations_path.exists() { + fs::read_to_string(&annotations_path).expect("failed to read annotations") + } else { + let response = http_client + .get(annotations_url, Default::default(), true) + .await + .expect("failed to fetch annotations csv"); + let mut body = String::new(); + response + .into_body() + .read_to_string(&mut body) + .await + .expect("failed to read annotations.csv response"); + fs::write(annotations_path, &body).expect("failed to write annotations.csv"); + body + }; + + // Parse the annotations CSV. Skip over queries with zero relevance. 
+ let rows = annotations_csv_content.lines().filter_map(|line| { + let mut values = line.split(','); + let _language = values.next()?; + let query = values.next()?; + let github_url = values.next()?; + let score = values.next()?; + + if score == "0" { + return None; + } + + let url_path = github_url.strip_prefix("https://github.com/")?; + let (url_path, hash) = url_path.split_once('#')?; + let (repo_name, url_path) = url_path.split_once("/blob/")?; + let (sha, file_path) = url_path.split_once('/')?; + let line_range = if let Some((start, end)) = hash.split_once('-') { + start.strip_prefix("L")?.parse::().ok()?..=end.strip_prefix("L")?.parse().ok()? + } else { + let row = hash.strip_prefix("L")?.parse().ok()?; + row..=row + }; + Some((repo_name, sha, query, file_path, line_range)) + }); + + // Group the annotations by repo and sha. + let mut evaluations_by_repo = BTreeMap::new(); + for (repo_name, sha, query, file_path, lines) in rows { + let evaluation_project = evaluations_by_repo + .entry((repo_name, sha)) + .or_insert_with(|| EvaluationProject { + repo: repo_name.to_string(), + sha: sha.to_string(), + queries: Vec::new(), + }); + + let ix = evaluation_project + .queries + .iter() + .position(|entry| entry.query == query) + .unwrap_or_else(|| { + evaluation_project.queries.push(EvaluationQuery { + query: query.to_string(), + expected_results: Vec::new(), + }); + evaluation_project.queries.len() - 1 + }); + let results = &mut evaluation_project.queries[ix].expected_results; + let result = EvaluationSearchResult { + file: file_path.to_string(), + lines, + }; + if !results.contains(&result) { + results.push(result); + } + } + + let evaluations = evaluations_by_repo.into_values().collect::>(); + let evaluations_path = dataset_dir.join("evaluations.json"); + fs::write( + &evaluations_path, + serde_json::to_vec_pretty(&evaluations).unwrap(), + ) + .unwrap(); + + eprintln!( + "Fetched CodeSearchNet evaluations into {}", + evaluations_path.display() + ); + + Ok(()) +} + 
+async fn run_evaluation( + only_repo: Option, + executor: &BackgroundExecutor, + cx: &mut AsyncAppContext, +) -> Result<()> { + cx.update(|cx| { + let mut store = SettingsStore::new(cx); + store + .set_default_settings(settings::default_settings().as_ref(), cx) + .unwrap(); + cx.set_global(store); + client::init_settings(cx); + language::init(cx); + Project::init_settings(cx); + cx.update_flags(false, vec![]); + }) + .unwrap(); + + let dataset_dir = Path::new(CODESEARCH_NET_DIR); + let evaluations_path = dataset_dir.join("evaluations.json"); + let repos_dir = Path::new(EVAL_REPOS_DIR); + let db_path = Path::new(EVAL_DB_PATH); + let http_client = http_client::HttpClientWithProxy::new(None, None); + let api_key = std::env::var("OPENAI_API_KEY").unwrap(); + let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new()); + let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc; + let clock = Arc::new(RealSystemClock); + let client = cx + .update(|cx| { + Client::new( + clock, + Arc::new(http_client::HttpClientWithUrl::new( + "https://zed.dev", + None, + None, + )), + cx, + ) + }) + .unwrap(); + let user_store = cx + .new_model(|cx| UserStore::new(client.clone(), cx)) + .unwrap(); + let node_runtime = Arc::new(FakeNodeRuntime {}); + + let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json"); + let evaluations: Vec = serde_json::from_slice(&evaluations).unwrap(); + + let embedding_provider = Arc::new(OpenAiEmbeddingProvider::new( + http_client.clone(), + OpenAiEmbeddingModel::TextEmbedding3Small, + open_ai::OPEN_AI_API_URL.to_string(), + api_key, + )); + + let language_registry = Arc::new(LanguageRegistry::new(executor.clone())); + cx.update(|cx| languages::init(language_registry.clone(), node_runtime.clone(), cx)) + .unwrap(); + + let mut covered_result_count = 0; + let mut overlapped_result_count = 0; + let mut covered_file_count = 0; + let mut total_result_count = 0; + eprint!("Running evals."); + + 
for evaluation_project in evaluations { + if only_repo + .as_ref() + .map_or(false, |only_repo| only_repo != &evaluation_project.repo) + { + continue; + } + + eprint!("\r\x1B[2K"); + eprint!( + "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. Project: {}...", + covered_result_count, + total_result_count, + overlapped_result_count, + total_result_count, + covered_file_count, + total_result_count, + evaluation_project.repo + ); + + let repo_db_path = + db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_"))); + let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider.clone(), cx) + .await + .unwrap(); + + let repo_dir = repos_dir.join(&evaluation_project.repo); + if !repo_dir.exists() || repo_dir.join(SKIP_EVAL_PATH).exists() { + eprintln!("Skipping {}: directory not found", evaluation_project.repo); + continue; + } + + let project = cx + .update(|cx| { + Project::local( + client.clone(), + node_runtime.clone(), + user_store.clone(), + language_registry.clone(), + fs.clone(), + None, + cx, + ) + }) + .unwrap(); + + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree(repo_dir, true, cx) + })? 
+ .await?; + + worktree + .update(cx, |worktree, _| { + worktree.as_local().unwrap().scan_complete() + }) + .unwrap() + .await; + + let project_index = cx + .update(|cx| semantic_index.create_project_index(project.clone(), cx)) + .unwrap(); + wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await; + + for query in evaluation_project.queries { + let results = cx + .update(|cx| { + let project_index = project_index.read(cx); + project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx) + }) + .unwrap() + .await + .unwrap(); + + let results = SemanticDb::load_results(results, &fs.clone(), &cx) + .await + .unwrap(); + + let mut project_covered_result_count = 0; + let mut project_overlapped_result_count = 0; + let mut project_covered_file_count = 0; + let mut covered_result_indices = Vec::new(); + for expected_result in &query.expected_results { + let mut file_matched = false; + let mut range_overlapped = false; + let mut range_covered = false; + + for (ix, result) in results.iter().enumerate() { + if result.path.as_ref() == Path::new(&expected_result.file) { + file_matched = true; + let start_matched = + result.row_range.contains(&expected_result.lines.start()); + let end_matched = result.row_range.contains(&expected_result.lines.end()); + + if start_matched || end_matched { + range_overlapped = true; + } + + if start_matched && end_matched { + range_covered = true; + covered_result_indices.push(ix); + break; + } + } + } + + if range_covered { + project_covered_result_count += 1 + }; + if range_overlapped { + project_overlapped_result_count += 1 + }; + if file_matched { + project_covered_file_count += 1 + }; + } + let outcome_repo = evaluation_project.repo.clone(); + + let query_results = EvaluationQueryOutcome { + repo: outcome_repo, + query: query.query, + total_result_count: query.expected_results.len(), + covered_result_count: project_covered_result_count, + overlapped_result_count: project_overlapped_result_count, + 
covered_file_count: project_covered_file_count, + expected_results: query.expected_results, + actual_results: results + .iter() + .map(|result| EvaluationSearchResult { + file: result.path.to_string_lossy().to_string(), + lines: result.row_range.clone(), + }) + .collect(), + covered_result_indices, + }; + + overlapped_result_count += query_results.overlapped_result_count; + covered_result_count += query_results.covered_result_count; + covered_file_count += query_results.covered_file_count; + total_result_count += query_results.total_result_count; + + println!("{}", serde_json::to_string(&query_results).unwrap()); + } + } + + eprint!( + "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured.", + covered_result_count, + total_result_count, + overlapped_result_count, + total_result_count, + covered_file_count, + total_result_count, + ); + + Ok(()) +} + +async fn wait_for_indexing_complete( + project_index: &Model, + cx: &mut AsyncAppContext, + timeout: Option, +) { + let (tx, rx) = bounded(1); + let subscription = cx.update(|cx| { + cx.subscribe(project_index, move |_, event, _| { + if let Status::Idle = event { + let _ = tx.try_send(*event); + } + }) + }); + + let result = match timeout { + Some(timeout_duration) => { + smol::future::or( + async { + rx.recv().await.map_err(|_| ())?; + Ok(()) + }, + async { + Timer::after(timeout_duration).await; + Err(()) + }, + ) + .await + } + None => rx.recv().await.map(|_| ()).map_err(|_| ()), + }; + + match result { + Ok(_) => (), + Err(_) => { + if let Some(timeout) = timeout { + eprintln!("Timeout: Indexing did not complete within {:?}", timeout); + } + } + } + + drop(subscription); +} + +async fn fetch_eval_repos( + executor: &BackgroundExecutor, + http_client: &dyn HttpClient, +) -> Result<()> { + let dataset_dir = Path::new(CODESEARCH_NET_DIR); + let evaluations_path = dataset_dir.join("evaluations.json"); + let repos_dir = Path::new(EVAL_REPOS_DIR); + + let evaluations = 
fs::read(&evaluations_path).expect("failed to read evaluations.json"); + let evaluations: Vec = serde_json::from_slice(&evaluations).unwrap(); + + eprint!("Fetching evaluation repositories..."); + + executor + .scoped(move |scope| { + let done_count = Arc::new(AtomicUsize::new(0)); + let len = evaluations.len(); + for chunk in evaluations.chunks(evaluations.len() / 8) { + let chunk = chunk.to_vec(); + let done_count = done_count.clone(); + scope.spawn(async move { + for EvaluationProject { repo, sha, .. } in chunk { + eprint!( + "\rFetching evaluation repositories ({}/{})...", + done_count.load(SeqCst), + len, + ); + + fetch_eval_repo(repo, sha, repos_dir, http_client).await; + done_count.fetch_add(1, SeqCst); + } + }); + } + }) + .await; + + Ok(()) +} + +async fn fetch_eval_repo( + repo: String, + sha: String, + repos_dir: &Path, + http_client: &dyn HttpClient, +) { + let Some((owner, repo_name)) = repo.split_once('/') else { + return; + }; + let repo_dir = repos_dir.join(owner).join(repo_name); + fs::create_dir_all(&repo_dir).unwrap(); + let skip_eval_path = repo_dir.join(SKIP_EVAL_PATH); + if skip_eval_path.exists() { + return; + } + if let Ok(head_content) = fs::read_to_string(&repo_dir.join(".git").join("HEAD")) { + if head_content.trim() == sha { + return; + } + } + let repo_response = http_client + .send( + http_client::Request::builder() + .method(Method::HEAD) + .uri(format!("https://github.com/{}", repo)) + .body(Default::default()) + .expect(""), + ) + .await + .expect("failed to check github repo"); + if !repo_response.status().is_success() && !repo_response.status().is_redirection() { + fs::write(&skip_eval_path, "").unwrap(); + eprintln!( + "Repo {repo} is no longer public ({:?}). 
Skipping", + repo_response.status() + ); + return; + } + if !repo_dir.join(".git").exists() { + let init_output = Command::new("git") + .current_dir(&repo_dir) + .args(&["init"]) + .output() + .unwrap(); + if !init_output.status.success() { + eprintln!( + "Failed to initialize git repository for {}: {}", + repo, + String::from_utf8_lossy(&init_output.stderr) + ); + return; + } + } + let url = format!("https://github.com/{}.git", repo); + Command::new("git") + .current_dir(&repo_dir) + .args(&["remote", "add", "-f", "origin", &url]) + .stdin(Stdio::null()) + .output() + .unwrap(); + let fetch_output = Command::new("git") + .current_dir(&repo_dir) + .args(&["fetch", "--depth", "1", "origin", &sha]) + .stdin(Stdio::null()) + .output() + .unwrap(); + if !fetch_output.status.success() { + eprintln!( + "Failed to fetch {} for {}: {}", + sha, + repo, + String::from_utf8_lossy(&fetch_output.stderr) + ); + return; + } + let checkout_output = Command::new("git") + .current_dir(&repo_dir) + .args(&["checkout", &sha]) + .output() + .unwrap(); + + if !checkout_output.status.success() { + eprintln!( + "Failed to checkout {} for {}: {}", + sha, + repo, + String::from_utf8_lossy(&checkout_output.stderr) + ); + } +} diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 1841a1f394627d..7ea0029d79029f 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -5,6 +5,7 @@ use derive_more::Deref; use futures::future::BoxFuture; use futures_lite::FutureExt; use isahc::config::{Configurable, RedirectPolicy}; +pub use isahc::http; pub use isahc::{ http::{Method, StatusCode, Uri}, AsyncBody, Error, HttpClient as IsahcHttpClient, Request, Response, @@ -226,7 +227,7 @@ pub fn client(user_agent: Option, proxy: Option) -> Arc { - let language = language_registry - .language_for_file_path(&entry.path) - .await - .ok(); - let chunked_file = ChunkedFile { - chunks: chunking::chunk_text( - &text, - 
language.as_ref(), - &entry.path, - ), - handle, - path: entry.path, - mtime: entry.mtime, - text, - }; - - if chunked_files_tx.send(chunked_file).await.is_err() { - return; - } - } - Err(_)=> { - log::error!("Failed to read contents into a UTF-8 string: {entry_abs_path:?}"); + if let Some(text) = fs.load(&entry_abs_path).await.ok() { + let language = language_registry + .language_for_file_path(&entry.path) + .await + .ok(); + let chunked_file = ChunkedFile { + chunks: chunking::chunk_text( + &text, + language.as_ref(), + &entry.path, + ), + handle, + path: entry.path, + mtime: entry.mtime, + text, + }; + + if chunked_files_tx.send(chunked_file).await.is_err() { + return; } } } @@ -358,33 +353,37 @@ impl EmbeddingIndex { fn persist_embeddings( &self, mut deleted_entry_ranges: channel::Receiver<(Bound, Bound)>, - embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, + mut embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>, cx: &AppContext, ) -> Task> { let db_connection = self.db_connection.clone(); let db = self.db; - cx.background_executor().spawn(async move { - while let Some(deletion_range) = deleted_entry_ranges.next().await { - let mut txn = db_connection.write_txn()?; - let start = deletion_range.0.as_ref().map(|start| start.as_str()); - let end = deletion_range.1.as_ref().map(|end| end.as_str()); - log::debug!("deleting embeddings in range {:?}", &(start, end)); - db.delete_range(&mut txn, &(start, end))?; - txn.commit()?; - } - let mut embedded_files = embedded_files.chunks_timeout(4096, Duration::from_secs(2)); - while let Some(embedded_files) = embedded_files.next().await { - let mut txn = db_connection.write_txn()?; - for (file, _) in &embedded_files { - log::debug!("saving embedding for file {:?}", file.path); - let key = db_key_for_path(&file.path); - db.put(&mut txn, &key, file)?; + cx.background_executor().spawn(async move { + loop { + // Interleave deletions and persists of embedded files + 
futures::select_biased! { + deletion_range = deleted_entry_ranges.next() => { + if let Some(deletion_range) = deletion_range { + let mut txn = db_connection.write_txn()?; + let start = deletion_range.0.as_ref().map(|start| start.as_str()); + let end = deletion_range.1.as_ref().map(|end| end.as_str()); + log::debug!("deleting embeddings in range {:?}", &(start, end)); + db.delete_range(&mut txn, &(start, end))?; + txn.commit()?; + } + }, + file = embedded_files.next() => { + if let Some((file, _)) = file { + let mut txn = db_connection.write_txn()?; + log::debug!("saving embedding for file {:?}", file.path); + let key = db_key_for_path(&file.path); + db.put(&mut txn, &key, &file)?; + txn.commit()?; + } + }, + complete => break, } - txn.commit()?; - - drop(embedded_files); - log::debug!("committed"); } Ok(()) diff --git a/crates/semantic_index/src/project_index.rs b/crates/semantic_index/src/project_index.rs index 84a72c1a3d8c61..5c35c93fa943a7 100644 --- a/crates/semantic_index/src/project_index.rs +++ b/crates/semantic_index/src/project_index.rs @@ -15,7 +15,14 @@ use log; use project::{Project, Worktree, WorktreeId}; use serde::{Deserialize, Serialize}; use smol::channel; -use std::{cmp::Ordering, future::Future, num::NonZeroUsize, ops::Range, path::Path, sync::Arc}; +use std::{ + cmp::Ordering, + future::Future, + num::NonZeroUsize, + ops::{Range, RangeInclusive}, + path::{Path, PathBuf}, + sync::Arc, +}; use util::ResultExt; #[derive(Debug)] @@ -26,6 +33,14 @@ pub struct SearchResult { pub score: f32, } +pub struct LoadedSearchResult { + pub path: Arc, + pub range: Range, + pub full_path: PathBuf, + pub file_content: String, + pub row_range: RangeInclusive, +} + pub struct WorktreeSearchResult { pub worktree_id: WorktreeId, pub path: Arc, diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index f2b325ead690b3..3435d0a9ca4f79 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ 
b/crates/semantic_index/src/semantic_index.rs @@ -10,14 +10,16 @@ mod worktree_index; use anyhow::{Context as _, Result}; use collections::HashMap; +use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Context, Global, Model, WeakModel}; use project::Project; -use project_index::ProjectIndex; use std::{path::PathBuf, sync::Arc}; use ui::ViewContext; +use util::ResultExt as _; use workspace::Workspace; pub use embedding::*; +pub use project_index::{LoadedSearchResult, ProjectIndex, SearchResult, Status}; pub use project_index_debug_view::ProjectIndexDebugView; pub use summary_index::FileSummary; @@ -56,27 +58,7 @@ impl SemanticDb { if cx.has_global::() { cx.update_global::(|this, cx| { - let project_index = cx.new_model(|cx| { - ProjectIndex::new( - project.clone(), - this.db_connection.clone(), - this.embedding_provider.clone(), - cx, - ) - }); - - let project_weak = project.downgrade(); - this.project_indices - .insert(project_weak.clone(), project_index); - - cx.on_release(move |_, _, cx| { - if cx.has_global::() { - cx.update_global::(|this, _| { - this.project_indices.remove(&project_weak); - }) - } - }) - .detach(); + this.create_project_index(project, cx); }) } else { log::info!("No SemanticDb, skipping project index") @@ -94,6 +76,50 @@ impl SemanticDb { }) } + pub async fn load_results( + results: Vec, + fs: &Arc, + cx: &AsyncAppContext, + ) -> Result> { + let mut loaded_results = Vec::new(); + for result in results { + let (full_path, file_content) = result.worktree.read_with(cx, |worktree, _cx| { + let entry_abs_path = worktree.abs_path().join(&result.path); + let mut entry_full_path = PathBuf::from(worktree.root_name()); + entry_full_path.push(&result.path); + let file_content = async { + let entry_abs_path = entry_abs_path; + fs.load(&entry_abs_path).await + }; + (entry_full_path, file_content) + })?; + if let Some(file_content) = file_content.await.log_err() { + let range_start = result.range.start.min(file_content.len()); + let 
range_end = result.range.end.min(file_content.len()); + + let start_row = file_content[0..range_start].matches('\n').count() as u32; + let end_row = file_content[0..range_end].matches('\n').count() as u32; + let start_line_byte_offset = file_content[0..range_start] + .rfind('\n') + .map(|pos| pos + 1) + .unwrap_or_default(); + let end_line_byte_offset = file_content[range_end..] + .find('\n') + .map(|pos| range_end + pos) + .unwrap_or_else(|| file_content.len()); + + loaded_results.push(LoadedSearchResult { + path: result.path, + range: start_line_byte_offset..end_line_byte_offset, + full_path, + file_content, + row_range: start_row..=end_row, + }); + } + } + Ok(loaded_results) + } + pub fn project_index( &mut self, project: Model, @@ -113,6 +139,36 @@ impl SemanticDb { }) }) } + + pub fn create_project_index( + &mut self, + project: Model, + cx: &mut AppContext, + ) -> Model { + let project_index = cx.new_model(|cx| { + ProjectIndex::new( + project.clone(), + self.db_connection.clone(), + self.embedding_provider.clone(), + cx, + ) + }); + + let project_weak = project.downgrade(); + self.project_indices + .insert(project_weak.clone(), project_index.clone()); + + cx.observe_release(&project, move |_, cx| { + if cx.has_global::() { + cx.update_global::(|this, _| { + this.project_indices.remove(&project_weak); + }) + } + }) + .detach(); + + project_index + } } #[cfg(test)] @@ -230,34 +286,13 @@ mod tests { let project = Project::test(fs, [project_path], cx).await; - cx.update(|cx| { + let project_index = cx.update(|cx| { let language_registry = project.read(cx).languages().clone(); let node_runtime = project.read(cx).node_runtime().unwrap().clone(); languages::init(language_registry, node_runtime, cx); - - // Manually create and insert the ProjectIndex - let project_index = cx.new_model(|cx| { - ProjectIndex::new( - project.clone(), - semantic_index.db_connection.clone(), - semantic_index.embedding_provider.clone(), - cx, - ) - }); - semantic_index - .project_indices 
- .insert(project.downgrade(), project_index); + semantic_index.create_project_index(project.clone(), cx) }); - let project_index = cx - .update(|_cx| { - semantic_index - .project_indices - .get(&project.downgrade()) - .cloned() - }) - .unwrap(); - cx.run_until_parked(); while cx .update(|cx| semantic_index.remaining_summaries(&project.downgrade(), cx)) From 8a6c65c63b6b2b8071ad2380ea392e4f51226c8f Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Tue, 17 Sep 2024 22:49:12 +0300 Subject: [PATCH 158/762] Allow task context providers to access project env (#17964) Closes #13106 Release Notes: - Task context providers now have access to the local shell environment, allowing local rust tool installations to work Before: Screenshot 2024-09-17 at 22 09 38 After: Screenshot 2024-09-17 at 22 09 58 --- crates/language/src/task_context.rs | 1 + crates/languages/src/go.rs | 2 ++ crates/languages/src/python.rs | 2 ++ crates/languages/src/rust.rs | 31 +++++++++++++++++++++------- crates/project/src/project.rs | 26 ++++++++++++----------- crates/project/src/task_inventory.rs | 3 ++- 6 files changed, 45 insertions(+), 20 deletions(-) diff --git a/crates/language/src/task_context.rs b/crates/language/src/task_context.rs index cc3f29558e0fbb..73150eb8e70d5c 100644 --- a/crates/language/src/task_context.rs +++ b/crates/language/src/task_context.rs @@ -25,6 +25,7 @@ pub trait ContextProvider: Send + Sync { &self, _variables: &TaskVariables, _location: &Location, + _project_env: Option<&HashMap>, _cx: &mut AppContext, ) -> Result { Ok(TaskVariables::default()) diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index a83a11fd49ee34..2ddf7796817e81 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -1,5 +1,6 @@ use anyhow::{anyhow, Context, Result}; use async_trait::async_trait; +use collections::HashMap; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext, Task}; use 
http_client::github::latest_github_release; @@ -454,6 +455,7 @@ impl ContextProvider for GoContextProvider { &self, variables: &TaskVariables, location: &Location, + _: Option<&HashMap>, cx: &mut gpui::AppContext, ) -> Result { let local_abs_path = location diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 99018647ead465..ee127c00cca846 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -1,5 +1,6 @@ use anyhow::Result; use async_trait::async_trait; +use collections::HashMap; use gpui::AppContext; use gpui::AsyncAppContext; use language::{ContextProvider, LanguageServerName, LspAdapter, LspAdapterDelegate}; @@ -215,6 +216,7 @@ impl ContextProvider for PythonContextProvider { &self, variables: &task::TaskVariables, _location: &project::Location, + _: Option<&HashMap>, _cx: &mut gpui::AppContext, ) -> Result { let python_module_name = python_module_name_from_relative_path( diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 5055bb69c1e6f5..b55f350b9d9187 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -1,6 +1,7 @@ use anyhow::{anyhow, bail, Context, Result}; use async_compression::futures::bufread::GzipDecoder; use async_trait::async_trait; +use collections::HashMap; use futures::{io::BufReader, StreamExt}; use gpui::{AppContext, AsyncAppContext}; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; @@ -434,6 +435,7 @@ impl ContextProvider for RustContextProvider { &self, task_variables: &TaskVariables, location: &Location, + project_env: Option<&HashMap>, cx: &mut gpui::AppContext, ) -> Result { let local_abs_path = location @@ -449,8 +451,8 @@ impl ContextProvider for RustContextProvider { .is_some(); if is_main_function { - if let Some((package_name, bin_name)) = - local_abs_path.and_then(package_name_and_bin_name_from_abs_path) + if let Some((package_name, bin_name)) = local_abs_path + .and_then(|path| 
package_name_and_bin_name_from_abs_path(path, project_env)) { return Ok(TaskVariables::from_iter([ (RUST_PACKAGE_TASK_VARIABLE.clone(), package_name), @@ -461,7 +463,7 @@ impl ContextProvider for RustContextProvider { if let Some(package_name) = local_abs_path .and_then(|local_abs_path| local_abs_path.parent()) - .and_then(human_readable_package_name) + .and_then(|path| human_readable_package_name(path, project_env)) { return Ok(TaskVariables::from_iter([( RUST_PACKAGE_TASK_VARIABLE.clone(), @@ -615,8 +617,15 @@ struct CargoTarget { src_path: String, } -fn package_name_and_bin_name_from_abs_path(abs_path: &Path) -> Option<(String, String)> { - let output = std::process::Command::new("cargo") +fn package_name_and_bin_name_from_abs_path( + abs_path: &Path, + project_env: Option<&HashMap>, +) -> Option<(String, String)> { + let mut command = std::process::Command::new("cargo"); + if let Some(envs) = project_env { + command.envs(envs); + } + let output = command .current_dir(abs_path.parent()?) 
.arg("metadata") .arg("--no-deps") @@ -654,9 +663,17 @@ fn retrieve_package_id_and_bin_name_from_metadata( None } -fn human_readable_package_name(package_directory: &Path) -> Option { +fn human_readable_package_name( + package_directory: &Path, + project_env: Option<&HashMap>, +) -> Option { + let mut command = std::process::Command::new("cargo"); + if let Some(envs) = project_env { + command.envs(envs); + } + let pkgid = String::from_utf8( - std::process::Command::new("cargo") + command .current_dir(package_directory) .arg("pkgid") .output() diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 5d9ac5e8214efb..4318737e3875b8 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4890,11 +4890,22 @@ impl Project { }; cx.spawn(|project, mut cx| async move { + let project_env = project + .update(&mut cx, |project, cx| { + let worktree_abs_path = worktree_abs_path.clone(); + project.environment.update(cx, |environment, cx| { + environment.get_environment(worktree_id, worktree_abs_path, cx) + }) + }) + .ok()? + .await; + let mut task_variables = cx .update(|cx| { combine_task_variables( captured_variables, location, + project_env.as_ref(), BasicContextProvider::new(project.upgrade()?), cx, ) @@ -4905,16 +4916,6 @@ impl Project { // Remove all custom entries starting with _, as they're not intended for use by the end user. task_variables.sweep(); - let project_env = project - .update(&mut cx, |project, cx| { - let worktree_abs_path = worktree_abs_path.clone(); - project.environment.update(cx, |environment, cx| { - environment.get_environment(worktree_id, worktree_abs_path, cx) - }) - }) - .ok()? 
- .await; - Some(TaskContext { project_env: project_env.unwrap_or_default(), cwd: worktree_abs_path.map(|p| p.to_path_buf()), @@ -5111,6 +5112,7 @@ impl Project { fn combine_task_variables( mut captured_variables: TaskVariables, location: Location, + project_env: Option<&HashMap>, baseline: BasicContextProvider, cx: &mut AppContext, ) -> anyhow::Result { @@ -5120,13 +5122,13 @@ fn combine_task_variables( .language() .and_then(|language| language.context_provider()); let baseline = baseline - .build_context(&captured_variables, &location, cx) + .build_context(&captured_variables, &location, project_env, cx) .context("building basic default context")?; captured_variables.extend(baseline); if let Some(provider) = language_context_provider { captured_variables.extend( provider - .build_context(&captured_variables, &location, cx) + .build_context(&captured_variables, &location, project_env, cx) .context("building provider context")?, ); } diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 314903ec5da439..83c9c1f8e58be9 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -8,7 +8,7 @@ use std::{ }; use anyhow::Result; -use collections::{btree_map, BTreeMap, VecDeque}; +use collections::{btree_map, BTreeMap, HashMap, VecDeque}; use futures::{ channel::mpsc::{unbounded, UnboundedSender}, StreamExt, @@ -543,6 +543,7 @@ impl ContextProvider for BasicContextProvider { &self, _: &TaskVariables, location: &Location, + _: Option<&HashMap>, cx: &mut AppContext, ) -> Result { let buffer = location.buffer.read(cx); From 7814dd0301c39987d36c8dbee4cd1b5802e315cc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 21:50:37 +0200 Subject: [PATCH 159/762] Update Rust crate sysinfo to 0.31.0 (#17733) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | 
Package | Type | Update | Change | |---|---|---|---| | [sysinfo](https://redirect.github.com/GuillaumeGomez/sysinfo) | workspace.dependencies | minor | `0.30.7` -> `0.31.0` | --- ### Release Notes
GuillaumeGomez/sysinfo (sysinfo) ### [`v0.31.4`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0314) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.31.3...v0.31.4) - macOS: Force memory cleanup in disk list retrieval. ### [`v0.31.3`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0313) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.31.2...v0.31.3) - Raspberry Pi: Fix temperature retrieval. ### [`v0.31.2`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0312) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.31.1...v0.31.2) - Remove `bstr` dependency (needed for rustc development). ### [`v0.31.1`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0311) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.31.0...v0.31.1) - Downgrade version of `memchr` (needed for rustc development). ### [`v0.31.0`](https://redirect.github.com/GuillaumeGomez/sysinfo/blob/HEAD/CHANGELOG.md#0310) [Compare Source](https://redirect.github.com/GuillaumeGomez/sysinfo/compare/v0.30.13...v0.31.0) - Split crate in features to only enable what you need. - Remove `System::refresh_process`, `System::refresh_process_specifics` and `System::refresh_pids` methods. - Add new argument of type `ProcessesToUpdate` to `System::refresh_processes` and `System::refresh_processes_specifics` methods. - Add new `NetworkData::ip_networks` method. - Add new `System::refresh_cpu_list` method. - Global CPU now only contains CPU usage. - Rename `TermalSensorType` to `ThermalSensorType`. - Process names is now an `OsString`. - Remove `System::global_cpu_info`. - Add `System::global_cpu_usage`. - macOS: Fix invalid CPU computation when single processes are refreshed one after the other. - Windows: Fix virtual memory computation. - Windows: Fix WoW64 parent process refresh. 
- Linux: Retrieve RSS (Resident Set Size) memory for cgroups.
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> --- Cargo.lock | 19 ++++--------------- Cargo.toml | 2 +- crates/client/src/telemetry.rs | 10 ++++++++-- crates/terminal/src/pty_info.rs | 15 ++++++++++----- 4 files changed, 23 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9cff8953932ade..930415440ba0ad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11085,17 +11085,16 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.30.13" +version = "0.31.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a5b4ddaee55fb2bea2bf0e5000747e5f5c0de765e5a5ff87f4cd106439f4bb3" +checksum = "355dbe4f8799b304b05e1b0f05fc59b2a18d36645cf169607da45bde2f69a1be" dependencies = [ - "cfg-if", "core-foundation-sys", "libc", + "memchr", "ntapi", - "once_cell", "rayon", - "windows 0.52.0", + "windows 0.54.0", ] [[package]] @@ -13443,16 +13442,6 @@ dependencies = [ "windows-targets 0.48.5", ] -[[package]] -name = "windows" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" -dependencies = [ - "windows-core 0.52.0", - "windows-targets 0.52.6", -] - [[package]] name = "windows" version = "0.54.0" diff --git a/Cargo.toml b/Cargo.toml index eea510edf2e0e9..0b392e02eb7b36 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -416,7 
+416,7 @@ strsim = "0.11" strum = { version = "0.25.0", features = ["derive"] } subtle = "2.5.0" sys-locale = "0.3.1" -sysinfo = "0.30.7" +sysinfo = "0.31.0" tempfile = "3.9.0" thiserror = "1.0.29" tiktoken-rs = "0.5.9" diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 860288038bd9e2..b415cae14c60a2 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -304,7 +304,10 @@ impl Telemetry { let refresh_kind = ProcessRefreshKind::new().with_cpu().with_memory(); let current_process = Pid::from_u32(std::process::id()); - system.refresh_process_specifics(current_process, refresh_kind); + system.refresh_processes_specifics( + sysinfo::ProcessesToUpdate::Some(&[current_process]), + refresh_kind, + ); // Waiting some amount of time before the first query is important to get a reasonable value // https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage @@ -314,7 +317,10 @@ impl Telemetry { smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await; let current_process = Pid::from_u32(std::process::id()); - system.refresh_process_specifics(current_process, refresh_kind); + system.refresh_processes_specifics( + sysinfo::ProcessesToUpdate::Some(&[current_process]), + refresh_kind, + ); let Some(process) = system.process(current_process) else { log::error!( "Failed to find own process {current_process:?} in system process table" diff --git a/crates/terminal/src/pty_info.rs b/crates/terminal/src/pty_info.rs index 5fc3b05f78f139..559d022fda8a0c 100644 --- a/crates/terminal/src/pty_info.rs +++ b/crates/terminal/src/pty_info.rs @@ -98,9 +98,10 @@ impl PtyProcessInfo { fn refresh(&mut self) -> Option<&Process> { let pid = self.pid_getter.pid()?; - if self - .system - .refresh_process_specifics(pid, self.refresh_kind) + if self.system.refresh_processes_specifics( + sysinfo::ProcessesToUpdate::Some(&[pid]), + self.refresh_kind, + ) == 1 { self.system.process(pid) } else { @@ -116,9 +117,13 @@ impl 
PtyProcessInfo { .map_or(PathBuf::new(), |p| p.to_owned()); let info = ProcessInfo { - name: process.name().to_owned(), + name: process.name().to_str()?.to_owned(), cwd, - argv: process.cmd().to_vec(), + argv: process + .cmd() + .iter() + .filter_map(|s| s.to_str().map(ToOwned::to_owned)) + .collect(), }; self.current = Some(info.clone()); Some(info) From 8cc6df573cb5926ce501eb58e3ca898e43142a33 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 17 Sep 2024 14:13:37 -0600 Subject: [PATCH 160/762] SshLspAdapterDelegate (#17965) Release Notes: - N/A --- crates/assistant/src/assistant_panel.rs | 20 +- crates/languages/src/rust.rs | 2 +- crates/project/src/lsp_store.rs | 337 ++++++++++++------- crates/proto/proto/zed.proto | 28 +- crates/proto/src/proto.rs | 11 +- crates/remote/src/ssh_session.rs | 19 +- crates/remote_server/src/headless_project.rs | 2 + crates/remote_server/src/main.rs | 1 - 8 files changed, 270 insertions(+), 150 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 6eaa86f4a7aff9..5d06720fe0095b 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -54,7 +54,7 @@ use language_model::{ use language_model::{LanguageModelImage, LanguageModelToolUse}; use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; -use project::lsp_store::ProjectLspAdapterDelegate; +use project::lsp_store::LocalLspAdapterDelegate; use project::{Project, Worktree}; use search::{buffer_search::DivRegistrar, BufferSearchBar}; use serde::{Deserialize, Serialize}; @@ -5384,18 +5384,16 @@ fn make_lsp_adapter_delegate( let worktree = project .worktrees(cx) .next() - .ok_or_else(|| anyhow!("no worktrees when constructing ProjectLspAdapterDelegate"))?; - let fs = if project.is_local() { - Some(project.fs().clone()) - } else { - None - }; + .ok_or_else(|| anyhow!("no worktrees when constructing LocalLspAdapterDelegate"))?; let http_client = 
project.client().http_client().clone(); project.lsp_store().update(cx, |lsp_store, cx| { - Ok( - ProjectLspAdapterDelegate::new(lsp_store, &worktree, http_client, fs, None, cx) - as Arc, - ) + Ok(LocalLspAdapterDelegate::new( + lsp_store, + &worktree, + http_client, + project.fs().clone(), + cx, + ) as Arc) }) }) } diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index b55f350b9d9187..a32ffe50f519f1 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -77,7 +77,7 @@ impl LspAdapter for RustLspAdapter { { Ok(()) => (Some(path), Some(env), None), Err(err) => { - log::error!("failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {:?}", path, err); + log::error!("failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {}", path, err); (None, None, None) } } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 2c718a42abbb84..daacf26c3a4c04 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -2305,8 +2305,7 @@ impl LspStore { .read(cx) .worktree_for_id(*worktree_id, cx)?; let state = this.as_local()?.language_servers.get(server_id)?; - let delegate = - ProjectLspAdapterDelegate::for_local(this, &worktree, cx); + let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); match state { LanguageServerState::Starting(_) => None, LanguageServerState::Running { @@ -4368,7 +4367,7 @@ impl LspStore { let response = this .update(&mut cx, |this, cx| { let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = ProjectLspAdapterDelegate::for_local(this, &worktree, cx); + let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); anyhow::Ok( cx.spawn(|_, _| async move { delegate.which(command.as_os_str()).await }), ) @@ -4389,7 +4388,7 @@ impl LspStore { let response = this .update(&mut cx, |this, cx| { let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = 
ProjectLspAdapterDelegate::for_local(this, &worktree, cx); + let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); anyhow::Ok(cx.spawn(|_, _| async move { delegate.shell_env().await })) })?? .await; @@ -4398,6 +4397,52 @@ impl LspStore { env: response.into_iter().collect(), }) } + pub async fn handle_try_exec( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + let binary = envelope + .payload + .binary + .ok_or_else(|| anyhow!("missing binary"))?; + let binary = LanguageServerBinary { + path: PathBuf::from(binary.path), + env: None, + arguments: binary.arguments.into_iter().map(Into::into).collect(), + }; + this.update(&mut cx, |this, cx| { + let worktree = this.worktree_for_id(worktree_id, cx)?; + let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); + anyhow::Ok(cx.spawn(|_, _| async move { delegate.try_exec(binary).await })) + })?? + .await?; + + Ok(proto::Ack {}) + } + + pub async fn handle_read_text_file( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let path = envelope + .payload + .path + .ok_or_else(|| anyhow!("missing path"))?; + let worktree_id = WorktreeId::from_proto(path.worktree_id); + let path = PathBuf::from(path.path); + let response = this + .update(&mut cx, |this, cx| { + let worktree = this.worktree_for_id(worktree_id, cx)?; + let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); + anyhow::Ok(cx.spawn(|_, _| async move { delegate.read_text_file(path).await })) + })?? 
+ .await?; + + Ok(proto::ReadTextFileResponse { text: response }) + } async fn handle_apply_additional_edits_for_completion( this: Model, @@ -4535,9 +4580,12 @@ impl LspStore { ) { let ssh = self.as_ssh().unwrap(); - let delegate = - ProjectLspAdapterDelegate::for_ssh(self, worktree, ssh.upstream_client.clone(), cx) - as Arc; + let delegate = Arc::new(SshLspAdapterDelegate { + lsp_store: cx.handle().downgrade(), + worktree: worktree.read(cx).snapshot(), + upstream_client: ssh.upstream_client.clone(), + language_registry: self.languages.clone(), + }) as Arc; // TODO: We should use `adapter` here instead of reaching through the `CachedLspAdapter`. let lsp_adapter = adapter.adapter.clone(); @@ -4645,7 +4693,7 @@ impl LspStore { let local = self.as_local().unwrap(); let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); - let lsp_adapter_delegate = ProjectLspAdapterDelegate::for_local(self, worktree_handle, cx); + let lsp_adapter_delegate = LocalLspAdapterDelegate::for_local(self, worktree_handle, cx); let project_environment = local.environment.update(cx, |environment, cx| { environment.get_environment(Some(worktree_id), Some(worktree_path.clone()), cx) }); @@ -6938,18 +6986,32 @@ impl LspAdapter for SshLspAdapter { None } } +pub fn language_server_settings<'a, 'b: 'a>( + delegate: &'a dyn LspAdapterDelegate, + language: &str, + cx: &'b AppContext, +) -> Option<&'a LspSettings> { + ProjectSettings::get( + Some(SettingsLocation { + worktree_id: delegate.worktree_id(), + path: delegate.worktree_root_path(), + }), + cx, + ) + .lsp + .get(language) +} -pub struct ProjectLspAdapterDelegate { +pub struct LocalLspAdapterDelegate { lsp_store: WeakModel, worktree: worktree::Snapshot, - fs: Option>, + fs: Arc, http_client: Arc, language_registry: Arc, load_shell_env_task: Shared>>>, - upstream_client: Option, } -impl ProjectLspAdapterDelegate { +impl LocalLspAdapterDelegate { fn for_local( lsp_store: &LspStore, worktree: &Model, @@ -6957,45 +7019,37 @@ impl 
ProjectLspAdapterDelegate { ) -> Arc { let local = lsp_store .as_local() - .expect("ProjectLspAdapterDelegate cannot be constructed on a remote"); + .expect("LocalLspAdapterDelegate cannot be constructed on a remote"); let http_client = local .http_client .clone() .unwrap_or_else(|| Arc::new(BlockedHttpClient)); - Self::new( - lsp_store, - worktree, - http_client, - Some(local.fs.clone()), - None, - cx, - ) - } - - fn for_ssh( - lsp_store: &LspStore, - worktree: &Model, - upstream_client: AnyProtoClient, - cx: &mut ModelContext, - ) -> Arc { - Self::new( - lsp_store, - worktree, - Arc::new(BlockedHttpClient), - None, - Some(upstream_client), - cx, - ) - } + Self::new(lsp_store, worktree, http_client, local.fs.clone(), cx) + } + + // fn for_ssh( + // lsp_store: &LspStore, + // worktree: &Model, + // upstream_client: AnyProtoClient, + // cx: &mut ModelContext, + // ) -> Arc { + // Self::new( + // lsp_store, + // worktree, + // Arc::new(BlockedHttpClient), + // None, + // Some(upstream_client), + // cx, + // ) + // } pub fn new( lsp_store: &LspStore, worktree: &Model, http_client: Arc, - fs: Option>, - upstream_client: Option, + fs: Arc, cx: &mut ModelContext, ) -> Arc { let worktree_id = worktree.read(cx).id(); @@ -7015,52 +7069,14 @@ impl ProjectLspAdapterDelegate { worktree: worktree.read(cx).snapshot(), fs, http_client, - upstream_client, language_registry: lsp_store.languages.clone(), load_shell_env_task, }) } } -struct BlockedHttpClient; - -impl HttpClient for BlockedHttpClient { - fn send( - &self, - _req: Request, - ) -> BoxFuture<'static, Result, Error>> { - Box::pin(async { - Err(std::io::Error::new( - std::io::ErrorKind::PermissionDenied, - "ssh host blocked http connection", - ) - .into()) - }) - } - - fn proxy(&self) -> Option<&Uri> { - None - } -} - -pub fn language_server_settings<'a, 'b: 'a>( - delegate: &'a dyn LspAdapterDelegate, - language: &str, - cx: &'b AppContext, -) -> Option<&'a LspSettings> { - ProjectSettings::get( - Some(SettingsLocation { 
- worktree_id: delegate.worktree_id(), - path: delegate.worktree_root_path(), - }), - cx, - ) - .lsp - .get(language) -} - #[async_trait] -impl LspAdapterDelegate for ProjectLspAdapterDelegate { +impl LspAdapterDelegate for LocalLspAdapterDelegate { fn show_notification(&self, message: &str, cx: &mut AppContext) { self.lsp_store .update(cx, |_, cx| { @@ -7082,42 +7098,12 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { } async fn shell_env(&self) -> HashMap { - if let Some(upstream_client) = &self.upstream_client { - use rpc::proto::SSH_PROJECT_ID; - - return upstream_client - .request(proto::ShellEnv { - project_id: SSH_PROJECT_ID, - worktree_id: self.worktree_id().to_proto(), - }) - .await - .map(|response| response.env.into_iter().collect()) - .unwrap_or_default(); - } - let task = self.load_shell_env_task.clone(); task.await.unwrap_or_default() } #[cfg(not(target_os = "windows"))] async fn which(&self, command: &OsStr) -> Option { - if let Some(upstream_client) = &self.upstream_client { - use rpc::proto::SSH_PROJECT_ID; - - return upstream_client - .request(proto::WhichCommand { - project_id: SSH_PROJECT_ID, - worktree_id: self.worktree_id().to_proto(), - command: command.to_string_lossy().to_string(), - }) - .await - .log_err() - .and_then(|response| response.path) - .map(PathBuf::from); - } - - self.fs.as_ref()?; - let worktree_abs_path = self.worktree.abs_path(); let shell_path = self.shell_env().await.get("PATH").cloned(); which::which_in(command, shell_path.as_ref(), worktree_abs_path).ok() @@ -7125,8 +7111,6 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { #[cfg(target_os = "windows")] async fn which(&self, command: &OsStr) -> Option { - self.fs.as_ref()?; - // todo(windows) Getting the shell env variables in a current directory on Windows is more complicated than other platforms // there isn't a 'default shell' necessarily. 
The closest would be the default profile on the windows terminal // SEE: https://learn.microsoft.com/en-us/windows/terminal/customize-settings/startup @@ -7134,10 +7118,6 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { } async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { - if self.fs.is_none() { - return Ok(()); - } - let working_dir = self.worktree_root_path(); let output = smol::process::Command::new(&command.path) .args(command.arguments) @@ -7170,12 +7150,127 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate { if self.worktree.entry_for_path(&path).is_none() { return Err(anyhow!("no such path {path:?}")); }; - if let Some(fs) = &self.fs { - let content = fs.load(&path).await?; - Ok(content) - } else { - return Err(anyhow!("cannot open {path:?} on ssh host (yet!)")); - } + self.fs.load(&path).await + } +} + +struct BlockedHttpClient; + +impl HttpClient for BlockedHttpClient { + fn send( + &self, + _req: Request, + ) -> BoxFuture<'static, Result, Error>> { + Box::pin(async { + Err(std::io::Error::new( + std::io::ErrorKind::PermissionDenied, + "ssh host blocked http connection", + ) + .into()) + }) + } + + fn proxy(&self) -> Option<&Uri> { + None + } +} + +struct SshLspAdapterDelegate { + lsp_store: WeakModel, + worktree: worktree::Snapshot, + upstream_client: AnyProtoClient, + language_registry: Arc, +} + +#[async_trait] +impl LspAdapterDelegate for SshLspAdapterDelegate { + fn show_notification(&self, message: &str, cx: &mut AppContext) { + self.lsp_store + .update(cx, |_, cx| { + cx.emit(LspStoreEvent::Notification(message.to_owned())) + }) + .ok(); + } + + fn http_client(&self) -> Arc { + Arc::new(BlockedHttpClient) + } + + fn worktree_id(&self) -> WorktreeId { + self.worktree.id() + } + + fn worktree_root_path(&self) -> &Path { + self.worktree.abs_path().as_ref() + } + + async fn shell_env(&self) -> HashMap { + use rpc::proto::SSH_PROJECT_ID; + + self.upstream_client + .request(proto::ShellEnv { + project_id: 
SSH_PROJECT_ID, + worktree_id: self.worktree_id().to_proto(), + }) + .await + .map(|response| response.env.into_iter().collect()) + .unwrap_or_default() + } + + async fn which(&self, command: &OsStr) -> Option { + use rpc::proto::SSH_PROJECT_ID; + + self.upstream_client + .request(proto::WhichCommand { + project_id: SSH_PROJECT_ID, + worktree_id: self.worktree_id().to_proto(), + command: command.to_string_lossy().to_string(), + }) + .await + .log_err() + .and_then(|response| response.path) + .map(PathBuf::from) + } + + async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { + self.upstream_client + .request(proto::TryExec { + project_id: rpc::proto::SSH_PROJECT_ID, + worktree_id: self.worktree.id().to_proto(), + binary: Some(proto::LanguageServerCommand { + path: command.path.to_string_lossy().to_string(), + arguments: command + .arguments + .into_iter() + .map(|s| s.to_string_lossy().to_string()) + .collect(), + env: command.env.unwrap_or_default().into_iter().collect(), + }), + }) + .await?; + Ok(()) + } + + fn update_status( + &self, + server_name: LanguageServerName, + status: language::LanguageServerBinaryStatus, + ) { + self.language_registry + .update_lsp_status(server_name, status); + } + + async fn read_text_file(&self, path: PathBuf) -> Result { + self.upstream_client + .request(proto::ReadTextFile { + project_id: rpc::proto::SSH_PROJECT_ID, + path: Some(proto::ProjectPath { + worktree_id: self.worktree.id().to_proto(), + path: path.to_string_lossy().to_string(), + }), + }) + .await + .map(|r| r.text) } } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 77942c8a94d542..a886b2185556f3 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -289,7 +289,11 @@ message Envelope { WhichCommandResponse which_command_response = 249; ShellEnv shell_env = 250; - ShellEnvResponse shell_env_response = 251; // current max + ShellEnvResponse shell_env_response = 251; + + TryExec try_exec = 252; + 
ReadTextFile read_text_file = 253; + ReadTextFileResponse read_text_file_response = 254; // current max } reserved 158 to 161; @@ -2551,13 +2555,21 @@ message ShellEnvResponse { map env = 1; } -// message RestartLanguageServer { - -// } -// message DestroyLanguageServer { +message ReadTextFile { + uint64 project_id = 1; + ProjectPath path = 2; +} -// } +message ReadTextFileResponse { + string text = 1; +} -// message LspWorkspaceConfiguration { +message TryExec { + uint64 project_id = 1; + uint64 worktree_id = 2; + LanguageServerCommand binary = 3; +} -// } +message TryExecResponse { + string text = 1; +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index a1853ed4a3bb5b..b5a00d16704c41 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -370,6 +370,9 @@ messages!( (WhichCommandResponse, Foreground), (ShellEnv, Foreground), (ShellEnvResponse, Foreground), + (TryExec, Foreground), + (ReadTextFile, Foreground), + (ReadTextFileResponse, Foreground) ); request_messages!( @@ -495,7 +498,9 @@ request_messages!( (AddWorktree, AddWorktreeResponse), (CreateLanguageServer, Ack), (WhichCommand, WhichCommandResponse), - (ShellEnv, ShellEnvResponse) + (ShellEnv, ShellEnvResponse), + (ReadTextFile, ReadTextFileResponse), + (TryExec, Ack), ); entity_messages!( @@ -571,7 +576,9 @@ entity_messages!( UpdateUserSettings, CreateLanguageServer, WhichCommand, - ShellEnv + ShellEnv, + TryExec, + ReadTextFile ); entity_messages!( diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 4762a785dbdcd2..7556b38f3ed0f4 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -15,7 +15,7 @@ use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion}; use parking_lot::Mutex; use rpc::{ proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage}, - EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet, + EntityMessageSubscriber, ProtoClient, 
ProtoMessageHandlerSet, RpcError, }; use smol::{ fs, @@ -157,8 +157,9 @@ impl SshSession { let mut remote_server_child = socket .ssh_command(format!( - "RUST_LOG={} {:?} run", + "RUST_LOG={} RUST_BACKTRACE={} {:?} run", std::env::var("RUST_LOG").unwrap_or_default(), + std::env::var("RUST_BACKTRACE").unwrap_or_default(), remote_binary_path, )) .spawn() @@ -349,7 +350,7 @@ impl SshSession { } Err(error) => { log::error!( - "error handling message. type:{type_name}, error:{error:?}", + "error handling message. type:{type_name}, error:{error}", ); } } @@ -371,7 +372,7 @@ impl SshSession { payload: T, ) -> impl 'static + Future> { log::debug!("ssh request start. name:{}", T::NAME); - let response = self.request_dynamic(payload.into_envelope(0, None, None), ""); + let response = self.request_dynamic(payload.into_envelope(0, None, None), T::NAME); async move { let response = response.await?; log::debug!("ssh request finish. name:{}", T::NAME); @@ -388,7 +389,7 @@ impl SshSession { pub fn request_dynamic( &self, mut envelope: proto::Envelope, - _request_type: &'static str, + type_name: &'static str, ) -> impl 'static + Future> { envelope.id = self.next_message_id.fetch_add(1, SeqCst); let (tx, rx) = oneshot::channel(); @@ -396,7 +397,13 @@ impl SshSession { response_channels_lock.insert(MessageId(envelope.id), tx); drop(response_channels_lock); self.outgoing_tx.unbounded_send(envelope).ok(); - async move { Ok(rx.await.context("connection lost")?.0) } + async move { + let response = rx.await.context("connection lost")?.0; + if let Some(proto::envelope::Payload::Error(error)) = &response.payload { + return Err(RpcError::from_proto(error, type_name)); + } + Ok(response) + } } pub fn send_dynamic(&self, mut envelope: proto::Envelope) -> Result<()> { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 58f5cb0c203cf1..35d6630c1e11be 100644 --- a/crates/remote_server/src/headless_project.rs +++ 
b/crates/remote_server/src/headless_project.rs @@ -107,6 +107,8 @@ impl HeadlessProject { client.add_model_request_handler(LspStore::handle_create_language_server); client.add_model_request_handler(LspStore::handle_which_command); client.add_model_request_handler(LspStore::handle_shell_env); + client.add_model_request_handler(LspStore::handle_try_exec); + client.add_model_request_handler(LspStore::handle_read_text_file); BufferStore::init(&client); WorktreeStore::init(&client); diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 696022a4561094..908a0a89b6273b 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -24,7 +24,6 @@ fn main() { #[cfg(not(windows))] fn main() { - env::set_var("RUST_BACKTRACE", "1"); env_logger::builder() .format(|buf, record| { serde_json::to_writer(&mut *buf, &LogRecord::new(record))?; From bdca342cdc2cd4d884d13c2246f0bc89365f3a9a Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 17 Sep 2024 16:28:09 -0400 Subject: [PATCH 161/762] Fix "view release notes" on dev/nightly builds (#17967) --- crates/auto_update/src/auto_update.rs | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 8063ff4c40fca3..c0863e41d1aa9c 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -244,19 +244,22 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<( let auto_updater = AutoUpdater::get(cx)?; let release_channel = ReleaseChannel::try_global(cx)?; - if matches!( - release_channel, - ReleaseChannel::Stable | ReleaseChannel::Preview - ) { - let auto_updater = auto_updater.read(cx); - let release_channel = release_channel.dev_name(); - let current_version = auto_updater.current_version; - let url = &auto_updater - .http_client - 
.build_url(&format!("/releases/{release_channel}/{current_version}")); - cx.open_url(url); + match release_channel { + ReleaseChannel::Stable | ReleaseChannel::Preview => { + let auto_updater = auto_updater.read(cx); + let current_version = auto_updater.current_version; + let release_channel = release_channel.dev_name(); + let path = format!("/releases/{release_channel}/{current_version}"); + let url = &auto_updater.http_client.build_url(&path); + cx.open_url(url); + } + ReleaseChannel::Nightly => { + cx.open_url("https://github.com/zed-industries/zed/commits/nightly/"); + } + ReleaseChannel::Dev => { + cx.open_url("https://github.com/zed-industries/zed/commits/main/"); + } } - None } From 51faf4a1cd8965bf014660a63cc6c18e3e39c5cb Mon Sep 17 00:00:00 2001 From: Marek Fajkus Date: Tue, 17 Sep 2024 22:28:52 +0200 Subject: [PATCH 162/762] Add missing cmake dependency to Nix build (#17968) cmake is required during build of dependecies and thus needs to be supplied in nativeBuildInputs (dependecies required for build not during runtime). This fixes (sandboxed) nix builds of the project. Release Notes: - N/A --- nix/build.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nix/build.nix b/nix/build.nix index cd000e9e918969..4782c9a56fda21 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -28,6 +28,7 @@ stdenvAdapters, nix-gitignore, withGLES ? 
false, + cmake, }: let includeFilter = path: type: let baseName = baseNameOf (toString path); @@ -58,6 +59,7 @@ pkg-config protobuf rustPlatform.bindgenHook + cmake ]; buildInputs = [ From e7912370e66a8da7b7aa5560bb616590f0a9ad91 Mon Sep 17 00:00:00 2001 From: Graham Taylor Date: Tue, 17 Sep 2024 18:31:06 -0400 Subject: [PATCH 163/762] perplexity: Remove duplicate step and fix numbering in README (#17978) Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- extensions/perplexity/README.md | 38 +++++++++++++++++---------------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/extensions/perplexity/README.md b/extensions/perplexity/README.md index 094a876885e97f..405356dc535a36 100644 --- a/extensions/perplexity/README.md +++ b/extensions/perplexity/README.md @@ -12,30 +12,32 @@ Open the AI Assistant panel (`cmd-r` or `ctrl-r`) and enter: ## Development Setup -1. Install the rust toolchain and clone the zed repo: -``` -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +1. Install the Rust toolchain and clone the zed repo: -mkdir -p ~/code -cd ~/code -git clone https://github.com/zed-industries/zed -``` + ``` + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -2. Launch Zed and Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) -3. Open Zed -4. Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) -5. Click "Install Dev Extension" -6. Navigate to the "extensions/perplexity" folder inside the zed git repo. -7. Ensure your `PERPLEXITY_API_KEY` environment variable is set (instructions below) -```sh -env |grep PERPLEXITY_API_KEY -``` -8. Quit and relaunch Zed + mkdir -p ~/code + cd ~/code + git clone https://github.com/zed-industries/zed + ``` + +1. Open Zed +1. Open Zed Extensions (`cmd-shift-x` / `ctrl-shift-x`) +1. Click "Install Dev Extension" +1. Navigate to the "extensions/perplexity" folder inside the zed git repo. +1. 
Ensure your `PERPLEXITY_API_KEY` environment variable is set (instructions below) + + ```sh + env | grep PERPLEXITY_API_KEY + ``` + +1. Quit and relaunch Zed ## PERPLEXITY_API_KEY This extension requires a Perplexity API key to be available via the `PERPLEXITY_API_KEY` environment variable. -To onbtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD. +To onbtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD. Take your API key and add it to your environment by adding `export PERPLEXITY_API_KEY="pplx-0123456789abcdef..."` to your `~/.zshrc` or `~/.bashrc`. Reload close and reopen your terminal session. Check with `env |grep PERPLEXITY_API_KEY`. From db18f7a2b008452a5c64a87735ed9cdd916ebcbd Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 18:32:22 -0400 Subject: [PATCH 164/762] rust: Fix doc comment highlighting (#17976) This PR fixes an issue where `/` and `!` in Rust doc comments were being incorrectly highlighted as operators after #17734. We solve this by removing them from the operators list and using more scoped queries to highlight them. 
Release Notes: - N/A --------- Co-authored-by: Max --- crates/languages/src/rust/highlights.scm | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/languages/src/rust/highlights.scm b/crates/languages/src/rust/highlights.scm index 0c3aa7efc7a2a5..8d4bad06e06ea9 100644 --- a/crates/languages/src/rust/highlights.scm +++ b/crates/languages/src/rust/highlights.scm @@ -139,7 +139,6 @@ ] @comment.doc [ - "!" "!=" "%" "%=" @@ -159,7 +158,6 @@ ".." "..=" "..." - "/" "/=" ":" ";" @@ -183,6 +181,10 @@ "?" ] @operator +; Avoid highlighting these as operators when used in doc comments. +(unary_expression "!" @operator) +operator: "/" @operator + (lifetime) @lifetime (parameter (identifier) @variable.parameter) From 8e45bf71cae37cbbc99dda67fdce57aed4d2d8fd Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 17 Sep 2024 16:37:56 -0600 Subject: [PATCH 165/762] Refactor prettier (#17977) In preparation for making formatting work on ssh remotes Release Notes: - N/A Co-authored-by: Mikayla --- crates/node_runtime/src/node_runtime.rs | 49 + crates/project/src/lsp_store.rs | 194 ++- ...{prettier_support.rs => prettier_store.rs} | 1122 +++++++++-------- crates/project/src/project.rs | 218 +--- crates/remote_server/Cargo.toml | 1 + crates/remote_server/src/headless_project.rs | 14 +- 6 files changed, 902 insertions(+), 696 deletions(-) rename crates/project/src/{prettier_support.rs => prettier_store.rs} (65%) diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 73650d73c9d219..4aa65ab6db8049 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -462,3 +462,52 @@ impl NodeRuntime for FakeNodeRuntime { unreachable!("Should not install packages {packages:?}") } } + +// TODO: Remove this when headless binary can run node +pub struct DummyNodeRuntime; + +impl DummyNodeRuntime { + pub fn new() -> Arc { + Arc::new(Self) + } +} + +#[async_trait::async_trait] +impl 
NodeRuntime for DummyNodeRuntime { + async fn binary_path(&self) -> anyhow::Result { + anyhow::bail!("Dummy Node Runtime") + } + + async fn node_environment_path(&self) -> anyhow::Result { + anyhow::bail!("Dummy node runtime") + } + + async fn run_npm_subcommand( + &self, + _: Option<&Path>, + _subcommand: &str, + _args: &[&str], + ) -> anyhow::Result { + anyhow::bail!("Dummy node runtime") + } + + async fn npm_package_latest_version(&self, _name: &str) -> anyhow::Result { + anyhow::bail!("Dummy node runtime") + } + + async fn npm_package_installed_version( + &self, + _local_package_directory: &Path, + _name: &str, + ) -> Result> { + anyhow::bail!("Dummy node runtime") + } + + async fn npm_install_packages( + &self, + _: &Path, + _packages: &[(&str, &str)], + ) -> anyhow::Result<()> { + anyhow::bail!("Dummy node runtime") + } +} diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index daacf26c3a4c04..35eb20259c1393 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -3,6 +3,7 @@ use crate::{ environment::ProjectEnvironment, lsp_command::{self, *}, lsp_ext_command, + prettier_store::{self, PrettierStore, PrettierStoreEvent}, project_settings::{LspSettings, ProjectSettings}, relativize_path, resolve_path, worktree_store::{WorktreeStore, WorktreeStoreEvent}, @@ -101,6 +102,8 @@ pub struct LocalLspStore { HashMap>>, supplementary_language_servers: HashMap)>, + prettier_store: Model, + current_lsp_settings: HashMap, LspSettings>, _subscription: gpui::Subscription, } @@ -135,6 +138,7 @@ impl RemoteLspStore {} pub struct SshLspStore { upstream_client: AnyProtoClient, + current_lsp_settings: HashMap, LspSettings>, } #[allow(clippy::large_enum_variant)] @@ -310,9 +314,32 @@ impl LspStore { } } + pub fn swap_current_lsp_settings( + &mut self, + new_settings: HashMap, LspSettings>, + ) -> Option, LspSettings>> { + match &mut self.mode { + LspStoreMode::Ssh(SshLspStore { + current_lsp_settings, + .. 
+ }) + | LspStoreMode::Local(LocalLspStore { + current_lsp_settings, + .. + }) => { + let ret = mem::take(current_lsp_settings); + *current_lsp_settings = new_settings; + Some(ret) + } + LspStoreMode::Remote(_) => None, + } + } + + #[allow(clippy::too_many_arguments)] pub fn new_local( buffer_store: Model, worktree_store: Model, + prettier_store: Model, environment: Model, languages: Arc, http_client: Option>, @@ -324,6 +351,10 @@ impl LspStore { .detach(); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); + cx.subscribe(&prettier_store, Self::on_prettier_store_event) + .detach(); + cx.observe_global::(Self::on_settings_changed) + .detach(); Self { mode: LspStoreMode::Local(LocalLspStore { @@ -332,6 +363,8 @@ impl LspStore { last_workspace_edits_by_language_server: Default::default(), language_server_watched_paths: Default::default(), language_server_watcher_registrations: Default::default(), + current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), + prettier_store, environment, http_client, fs, @@ -387,9 +420,14 @@ impl LspStore { .detach(); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); + cx.observe_global::(Self::on_settings_changed) + .detach(); Self { - mode: LspStoreMode::Ssh(SshLspStore { upstream_client }), + mode: LspStoreMode::Ssh(SshLspStore { + upstream_client, + current_lsp_settings: Default::default(), + }), downstream_client: None, project_id, buffer_store, @@ -401,6 +439,7 @@ impl LspStore { buffer_snapshots: Default::default(), next_diagnostic_group_id: Default::default(), diagnostic_summaries: Default::default(), + diagnostics: Default::default(), active_entry: None, _maintain_workspace_config: Self::maintain_workspace_config(cx), @@ -498,6 +537,36 @@ impl LspStore { } } + fn on_prettier_store_event( + &mut self, + _: Model, + event: &PrettierStoreEvent, + cx: &mut ModelContext, + ) { + match event { + PrettierStoreEvent::LanguageServerRemoved(prettier_server_id) => { + 
self.unregister_supplementary_language_server(*prettier_server_id, cx); + } + PrettierStoreEvent::LanguageServerAdded { + new_server_id, + name, + prettier_server, + } => { + self.register_supplementary_language_server( + *new_server_id, + name.clone(), + prettier_server.clone(), + cx, + ); + } + } + } + + // todo! + pub fn prettier_store(&self) -> Option> { + self.as_local().map(|local| local.prettier_store.clone()) + } + fn on_buffer_event( &mut self, buffer: Model, @@ -656,11 +725,29 @@ impl LspStore { }); let buffer_file = buffer.read(cx).file().cloned(); + let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone(); let buffer_file = File::from_dyn(buffer_file.as_ref()); - if let Some(file) = buffer_file { + let worktree_id = if let Some(file) = buffer_file { let worktree = file.worktree.clone(); - self.start_language_servers(&worktree, new_language.name(), cx) + self.start_language_servers(&worktree, new_language.name(), cx); + + Some(worktree.read(cx).id()) + } else { + None + }; + + if let Some(prettier_plugins) = prettier_store::prettier_plugins_for_language(&settings) { + let prettier_store = self.as_local().map(|s| s.prettier_store.clone()); + if let Some(prettier_store) = prettier_store { + prettier_store.update(cx, |prettier_store, cx| { + prettier_store.install_default_prettier( + worktree_id, + prettier_plugins.iter().map(|s| Arc::from(s.as_str())), + cx, + ) + }) + } } cx.emit(LspStoreEvent::LanguageDetected { @@ -799,6 +886,95 @@ impl LspStore { Task::ready(Ok(Default::default())) } + fn on_settings_changed(&mut self, cx: &mut ModelContext) { + let mut language_servers_to_start = Vec::new(); + let mut language_formatters_to_check = Vec::new(); + for buffer in self.buffer_store.read(cx).buffers() { + let buffer = buffer.read(cx); + let buffer_file = File::from_dyn(buffer.file()); + let buffer_language = buffer.language(); + let settings = language_settings(buffer_language, buffer.file(), cx); + if let Some(language) = 
buffer_language { + if settings.enable_language_server { + if let Some(file) = buffer_file { + language_servers_to_start.push((file.worktree.clone(), language.name())); + } + } + language_formatters_to_check + .push((buffer_file.map(|f| f.worktree_id(cx)), settings.clone())); + } + } + + let mut language_servers_to_stop = Vec::new(); + let mut language_servers_to_restart = Vec::new(); + let languages = self.languages.to_vec(); + + let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone(); + let Some(current_lsp_settings) = self.swap_current_lsp_settings(new_lsp_settings.clone()) + else { + return; + }; + for (worktree_id, started_lsp_name) in self.started_language_servers() { + let language = languages.iter().find_map(|l| { + let adapter = self + .languages + .lsp_adapters(&l.name()) + .iter() + .find(|adapter| adapter.name == started_lsp_name)? + .clone(); + Some((l, adapter)) + }); + if let Some((language, adapter)) = language { + let worktree = self.worktree_for_id(worktree_id, cx).ok(); + let file = worktree.as_ref().and_then(|tree| { + tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _)) + }); + if !language_settings(Some(language), file.as_ref(), cx).enable_language_server { + language_servers_to_stop.push((worktree_id, started_lsp_name.clone())); + } else if let Some(worktree) = worktree { + let server_name = &adapter.name.0; + match ( + current_lsp_settings.get(server_name), + new_lsp_settings.get(server_name), + ) { + (None, None) => {} + (Some(_), None) | (None, Some(_)) => { + language_servers_to_restart.push((worktree, language.name())); + } + (Some(current_lsp_settings), Some(new_lsp_settings)) => { + if current_lsp_settings != new_lsp_settings { + language_servers_to_restart.push((worktree, language.name())); + } + } + } + } + } + } + + for (worktree_id, adapter_name) in language_servers_to_stop { + self.stop_language_server(worktree_id, adapter_name, cx) + .detach(); + } + + if let Some(prettier_store) = self.as_local().map(|s| 
s.prettier_store.clone()) { + prettier_store.update(cx, |prettier_store, cx| { + prettier_store.on_settings_changed(language_formatters_to_check, cx) + }) + } + + // Start all the newly-enabled language servers. + for (worktree, language) in language_servers_to_start { + self.start_language_servers(&worktree, language, cx); + } + + // Restart all language servers with changed initialization options. + for (worktree, language) in language_servers_to_restart { + self.restart_language_servers(worktree, language, cx); + } + + cx.notify(); + } + pub async fn execute_code_actions_on_servers( this: &WeakModel, adapters_and_servers: &[(Arc, Arc)], @@ -2375,7 +2551,7 @@ impl LspStore { }) } - pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { + fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { self.diagnostics.remove(&id_to_remove); self.diagnostic_summaries.remove(&id_to_remove); @@ -2406,6 +2582,12 @@ impl LspStore { } cx.emit(LspStoreEvent::LanguageServerRemoved(server_id_to_remove)); } + + if let Some(local) = self.as_local() { + local.prettier_store.update(cx, |prettier_store, cx| { + prettier_store.remove_worktree(id_to_remove, cx); + }) + } } pub fn shared( @@ -6117,6 +6299,10 @@ impl LspStore { let Some(local) = self.as_local() else { return }; + local.prettier_store.update(cx, |prettier_store, cx| { + prettier_store.update_prettier_settings(&worktree_handle, changes, cx) + }); + let worktree_id = worktree_handle.read(cx).id(); let mut language_server_ids = self .language_server_ids diff --git a/crates/project/src/prettier_support.rs b/crates/project/src/prettier_store.rs similarity index 65% rename from crates/project/src/prettier_support.rs rename to crates/project/src/prettier_store.rs index e90a1dbdf767f6..29101917fb9aa5 100644 --- a/crates/project/src/prettier_support.rs +++ b/crates/project/src/prettier_store.rs @@ -5,444 +5,384 @@ use std::{ }; use anyhow::{anyhow, Context, Result}; -use 
collections::HashSet; +use collections::{HashMap, HashSet}; use fs::Fs; use futures::{ future::{self, Shared}, + stream::FuturesUnordered, FutureExt, }; -use gpui::{AsyncAppContext, Model, ModelContext, Task, WeakModel}; +use gpui::{AsyncAppContext, EventEmitter, Model, ModelContext, Task, WeakModel}; use language::{ language_settings::{Formatter, LanguageSettings, SelectedFormatter}, - Buffer, LanguageServerName, LocalFile, + Buffer, LanguageRegistry, LanguageServerName, LocalFile, }; use lsp::{LanguageServer, LanguageServerId}; use node_runtime::NodeRuntime; use paths::default_prettier_dir; use prettier::Prettier; +use smol::stream::StreamExt; use util::{ResultExt, TryFutureExt}; -use crate::{File, FormatOperation, PathChange, Project, ProjectEntryId, Worktree, WorktreeId}; - -pub fn prettier_plugins_for_language( - language_settings: &LanguageSettings, -) -> Option<&HashSet> { - match &language_settings.formatter { - SelectedFormatter::Auto => Some(&language_settings.prettier.plugins), - - SelectedFormatter::List(list) => list - .as_ref() - .contains(&Formatter::Prettier) - .then_some(&language_settings.prettier.plugins), - } -} - -pub(super) async fn format_with_prettier( - project: &WeakModel, - buffer: &Model, - cx: &mut AsyncAppContext, -) -> Option> { - let prettier_instance = project - .update(cx, |project, cx| { - project.prettier_instance_for_buffer(buffer, cx) - }) - .ok()? 
- .await; - - let (prettier_path, prettier_task) = prettier_instance?; - - let prettier_description = match prettier_path.as_ref() { - Some(path) => format!("prettier at {path:?}"), - None => "default prettier instance".to_string(), - }; - - match prettier_task.await { - Ok(prettier) => { - let buffer_path = buffer - .update(cx, |buffer, cx| { - File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) - }) - .ok() - .flatten(); - - let format_result = prettier - .format(buffer, buffer_path, cx) - .await - .map(FormatOperation::Prettier) - .with_context(|| format!("{} failed to format buffer", prettier_description)); - - Some(format_result) - } - Err(error) => { - project - .update(cx, |project, _| { - let instance_to_update = match prettier_path { - Some(prettier_path) => project.prettier_instances.get_mut(&prettier_path), - None => match &mut project.default_prettier.prettier { - PrettierInstallation::NotInstalled { .. } => None, - PrettierInstallation::Installed(instance) => Some(instance), - }, - }; - - if let Some(instance) = instance_to_update { - instance.attempt += 1; - instance.prettier = None; - } - }) - .log_err(); - - Some(Err(anyhow!( - "{} failed to spawn: {error:#}", - prettier_description - ))) - } - } -} +use crate::{ + worktree_store::WorktreeStore, File, FormatOperation, PathChange, ProjectEntryId, Worktree, + WorktreeId, +}; -pub struct DefaultPrettier { - prettier: PrettierInstallation, - installed_plugins: HashSet>, +pub struct PrettierStore { + node: Arc, + fs: Arc, + languages: Arc, + worktree_store: Model, + default_prettier: DefaultPrettier, + prettiers_per_worktree: HashMap>>, + prettier_instances: HashMap, } -#[derive(Debug)] -pub enum PrettierInstallation { - NotInstalled { - attempts: usize, - installation_task: Option>>>>, - not_installed_plugins: HashSet>, +pub enum PrettierStoreEvent { + LanguageServerRemoved(LanguageServerId), + LanguageServerAdded { + new_server_id: LanguageServerId, + name: LanguageServerName, + 
prettier_server: Arc, }, - Installed(PrettierInstance), } -pub type PrettierTask = Shared, Arc>>>; - -#[derive(Debug, Clone)] -pub struct PrettierInstance { - attempt: usize, - prettier: Option, -} +impl EventEmitter for PrettierStore {} -impl Default for DefaultPrettier { - fn default() -> Self { +impl PrettierStore { + pub fn new( + node: Arc, + fs: Arc, + languages: Arc, + worktree_store: Model, + _: &mut ModelContext, + ) -> Self { Self { - prettier: PrettierInstallation::NotInstalled { - attempts: 0, - installation_task: None, - not_installed_plugins: HashSet::default(), - }, - installed_plugins: HashSet::default(), + node, + fs, + languages, + worktree_store, + default_prettier: DefaultPrettier::default(), + prettiers_per_worktree: HashMap::default(), + prettier_instances: HashMap::default(), } } -} -impl DefaultPrettier { - pub fn instance(&self) -> Option<&PrettierInstance> { - if let PrettierInstallation::Installed(instance) = &self.prettier { - Some(instance) - } else { - None + pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { + let mut prettier_instances_to_clean = FuturesUnordered::new(); + if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) { + for path in prettier_paths.iter().flatten() { + if let Some(prettier_instance) = self.prettier_instances.remove(path) { + prettier_instances_to_clean.push(async move { + prettier_instance + .server() + .await + .map(|server| server.server_id()) + }); + } + } } + cx.spawn(|prettier_store, mut cx| async move { + while let Some(prettier_server_id) = prettier_instances_to_clean.next().await { + if let Some(prettier_server_id) = prettier_server_id { + prettier_store + .update(&mut cx, |_, cx| { + cx.emit(PrettierStoreEvent::LanguageServerRemoved( + prettier_server_id, + )); + }) + .ok(); + } + } + }) + .detach(); } - pub fn prettier_task( + fn prettier_instance_for_buffer( &mut self, - node: &Arc, - worktree_id: Option, - cx: &mut ModelContext<'_, 
Project>, - ) -> Option>> { - match &mut self.prettier { - PrettierInstallation::NotInstalled { .. } => { - Some(start_default_prettier(Arc::clone(node), worktree_id, cx)) + buffer: &Model, + cx: &mut ModelContext, + ) -> Task, PrettierTask)>> { + let buffer = buffer.read(cx); + let buffer_file = buffer.file(); + if buffer.language().is_none() { + return Task::ready(None); + } + + let node = self.node.clone(); + + match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx))) { + Some((worktree_id, buffer_path)) => { + let fs = Arc::clone(&self.fs); + let installed_prettiers = self.prettier_instances.keys().cloned().collect(); + cx.spawn(|lsp_store, mut cx| async move { + match cx + .background_executor() + .spawn(async move { + Prettier::locate_prettier_installation( + fs.as_ref(), + &installed_prettiers, + &buffer_path, + ) + .await + }) + .await + { + Ok(ControlFlow::Break(())) => None, + Ok(ControlFlow::Continue(None)) => { + let default_instance = lsp_store + .update(&mut cx, |lsp_store, cx| { + lsp_store + .prettiers_per_worktree + .entry(worktree_id) + .or_default() + .insert(None); + lsp_store.default_prettier.prettier_task( + &node, + Some(worktree_id), + cx, + ) + }) + .ok()?; + Some((None, default_instance?.log_err().await?)) + } + Ok(ControlFlow::Continue(Some(prettier_dir))) => { + lsp_store + .update(&mut cx, |lsp_store, _| { + lsp_store + .prettiers_per_worktree + .entry(worktree_id) + .or_default() + .insert(Some(prettier_dir.clone())) + }) + .ok()?; + if let Some(prettier_task) = lsp_store + .update(&mut cx, |lsp_store, cx| { + lsp_store.prettier_instances.get_mut(&prettier_dir).map( + |existing_instance| { + existing_instance.prettier_task( + &node, + Some(&prettier_dir), + Some(worktree_id), + cx, + ) + }, + ) + }) + .ok()? 
+ { + log::debug!("Found already started prettier in {prettier_dir:?}"); + return Some((Some(prettier_dir), prettier_task?.await.log_err()?)); + } + + log::info!("Found prettier in {prettier_dir:?}, starting."); + let new_prettier_task = lsp_store + .update(&mut cx, |lsp_store, cx| { + let new_prettier_task = Self::start_prettier( + node, + prettier_dir.clone(), + Some(worktree_id), + cx, + ); + lsp_store.prettier_instances.insert( + prettier_dir.clone(), + PrettierInstance { + attempt: 0, + prettier: Some(new_prettier_task.clone()), + }, + ); + new_prettier_task + }) + .ok()?; + Some((Some(prettier_dir), new_prettier_task)) + } + Err(e) => { + log::error!("Failed to determine prettier path for buffer: {e:#}"); + None + } + } + }) } - PrettierInstallation::Installed(existing_instance) => { - existing_instance.prettier_task(node, None, worktree_id, cx) + None => { + let new_task = self.default_prettier.prettier_task(&node, None, cx); + cx.spawn(|_, _| async move { Some((None, new_task?.log_err().await?)) }) } } } -} -impl PrettierInstance { - pub fn prettier_task( - &mut self, - node: &Arc, - prettier_dir: Option<&Path>, + fn start_prettier( + node: Arc, + prettier_dir: PathBuf, worktree_id: Option, - cx: &mut ModelContext<'_, Project>, - ) -> Option>> { - if self.attempt > prettier::FAIL_THRESHOLD { - match prettier_dir { - Some(prettier_dir) => log::warn!( - "Prettier from path {prettier_dir:?} exceeded launch threshold, not starting" - ), - None => log::warn!("Default prettier exceeded launch threshold, not starting"), - } - return None; - } - Some(match &self.prettier { - Some(prettier_task) => Task::ready(Ok(prettier_task.clone())), - None => match prettier_dir { - Some(prettier_dir) => { - let new_task = start_prettier( - Arc::clone(node), - prettier_dir.to_path_buf(), - worktree_id, - cx, - ); - self.attempt += 1; - self.prettier = Some(new_task.clone()); - Task::ready(Ok(new_task)) - } - None => { - self.attempt += 1; - let node = Arc::clone(node); - 
cx.spawn(|project, mut cx| async move { - project - .update(&mut cx, |_, cx| { - start_default_prettier(node, worktree_id, cx) - })? - .await - }) - } - }, + cx: &mut ModelContext, + ) -> PrettierTask { + cx.spawn(|prettier_store, mut cx| async move { + log::info!("Starting prettier at path {prettier_dir:?}"); + let new_server_id = prettier_store.update(&mut cx, |prettier_store, _| { + prettier_store.languages.next_language_server_id() + })?; + + let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone()) + .await + .context("default prettier spawn") + .map(Arc::new) + .map_err(Arc::new)?; + Self::register_new_prettier( + &prettier_store, + &new_prettier, + worktree_id, + new_server_id, + &mut cx, + ); + Ok(new_prettier) }) + .shared() } - pub async fn server(&self) -> Option> { - self.prettier.clone()?.await.ok()?.server().cloned() - } -} - -fn start_default_prettier( - node: Arc, - worktree_id: Option, - cx: &mut ModelContext<'_, Project>, -) -> Task> { - cx.spawn(|project, mut cx| async move { - let installation_task = project.update(&mut cx, |project, _| { - match &project.default_prettier.prettier { - PrettierInstallation::NotInstalled { - installation_task, .. - } => ControlFlow::Continue(installation_task.clone()), - PrettierInstallation::Installed(default_prettier) => { - ControlFlow::Break(default_prettier.clone()) - } - } - })?; - match installation_task { - ControlFlow::Continue(None) => { - anyhow::bail!("Default prettier is not installed and cannot be started") - } - ControlFlow::Continue(Some(installation_task)) => { - log::info!("Waiting for default prettier to install"); - if let Err(e) = installation_task.await { - project.update(&mut cx, |project, _| { - if let PrettierInstallation::NotInstalled { - installation_task, - attempts, - .. 
- } = &mut project.default_prettier.prettier - { - *installation_task = None; - *attempts += 1; - } - })?; - anyhow::bail!( - "Cannot start default prettier due to its installation failure: {e:#}" - ); + fn start_default_prettier( + node: Arc, + worktree_id: Option, + cx: &mut ModelContext, + ) -> Task> { + cx.spawn(|prettier_store, mut cx| async move { + let installation_task = prettier_store.update(&mut cx, |prettier_store, _| { + match &prettier_store.default_prettier.prettier { + PrettierInstallation::NotInstalled { + installation_task, .. + } => ControlFlow::Continue(installation_task.clone()), + PrettierInstallation::Installed(default_prettier) => { + ControlFlow::Break(default_prettier.clone()) + } + } + })?; + match installation_task { + ControlFlow::Continue(None) => { + anyhow::bail!("Default prettier is not installed and cannot be started") } - let new_default_prettier = project.update(&mut cx, |project, cx| { + ControlFlow::Continue(Some(installation_task)) => { + log::info!("Waiting for default prettier to install"); + if let Err(e) = installation_task.await { + prettier_store.update(&mut cx, |project, _| { + if let PrettierInstallation::NotInstalled { + installation_task, + attempts, + .. 
+ } = &mut project.default_prettier.prettier + { + *installation_task = None; + *attempts += 1; + } + })?; + anyhow::bail!( + "Cannot start default prettier due to its installation failure: {e:#}" + ); + } let new_default_prettier = - start_prettier(node, default_prettier_dir().clone(), worktree_id, cx); - project.default_prettier.prettier = - PrettierInstallation::Installed(PrettierInstance { - attempt: 0, - prettier: Some(new_default_prettier.clone()), - }); - new_default_prettier - })?; - Ok(new_default_prettier) - } - ControlFlow::Break(instance) => match instance.prettier { - Some(instance) => Ok(instance), - None => { - let new_default_prettier = project.update(&mut cx, |project, cx| { - let new_default_prettier = - start_prettier(node, default_prettier_dir().clone(), worktree_id, cx); - project.default_prettier.prettier = - PrettierInstallation::Installed(PrettierInstance { - attempt: instance.attempt + 1, - prettier: Some(new_default_prettier.clone()), - }); - new_default_prettier - })?; + prettier_store.update(&mut cx, |prettier_store, cx| { + let new_default_prettier = Self::start_prettier( + node, + default_prettier_dir().clone(), + worktree_id, + cx, + ); + prettier_store.default_prettier.prettier = + PrettierInstallation::Installed(PrettierInstance { + attempt: 0, + prettier: Some(new_default_prettier.clone()), + }); + new_default_prettier + })?; Ok(new_default_prettier) } - }, - } - }) -} - -fn start_prettier( - node: Arc, - prettier_dir: PathBuf, - worktree_id: Option, - cx: &mut ModelContext<'_, Project>, -) -> PrettierTask { - cx.spawn(|project, mut cx| async move { - log::info!("Starting prettier at path {prettier_dir:?}"); - let new_server_id = project.update(&mut cx, |project, _| { - project.languages.next_language_server_id() - })?; - - let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone()) - .await - .context("default prettier spawn") - .map(Arc::new) - .map_err(Arc::new)?; - register_new_prettier(&project, 
&new_prettier, worktree_id, new_server_id, &mut cx); - Ok(new_prettier) - }) - .shared() -} - -fn register_new_prettier( - project: &WeakModel, - prettier: &Prettier, - worktree_id: Option, - new_server_id: LanguageServerId, - cx: &mut AsyncAppContext, -) { - let prettier_dir = prettier.prettier_dir(); - let is_default = prettier.is_default(); - if is_default { - log::info!("Started default prettier in {prettier_dir:?}"); - } else { - log::info!("Started prettier in {prettier_dir:?}"); + ControlFlow::Break(instance) => match instance.prettier { + Some(instance) => Ok(instance), + None => { + let new_default_prettier = + prettier_store.update(&mut cx, |prettier_store, cx| { + let new_default_prettier = Self::start_prettier( + node, + default_prettier_dir().clone(), + worktree_id, + cx, + ); + prettier_store.default_prettier.prettier = + PrettierInstallation::Installed(PrettierInstance { + attempt: instance.attempt + 1, + prettier: Some(new_default_prettier.clone()), + }); + new_default_prettier + })?; + Ok(new_default_prettier) + } + }, + } + }) } - if let Some(prettier_server) = prettier.server() { - project - .update(cx, |project, cx| { - let name = if is_default { - LanguageServerName(Arc::from("prettier (default)")) - } else { - let worktree_path = worktree_id - .and_then(|id| project.worktree_for_id(id, cx)) - .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path())); - let name = match worktree_path { - Some(worktree_path) => { - if prettier_dir == worktree_path.as_ref() { - let name = prettier_dir - .file_name() - .and_then(|name| name.to_str()) - .unwrap_or_default(); - format!("prettier ({name})") - } else { - let dir_to_display = prettier_dir - .strip_prefix(worktree_path.as_ref()) - .ok() - .unwrap_or(prettier_dir); - format!("prettier ({})", dir_to_display.display()) + + fn register_new_prettier( + prettier_store: &WeakModel, + prettier: &Prettier, + worktree_id: Option, + new_server_id: LanguageServerId, + cx: &mut AsyncAppContext, + ) { + 
let prettier_dir = prettier.prettier_dir(); + let is_default = prettier.is_default(); + if is_default { + log::info!("Started default prettier in {prettier_dir:?}"); + } else { + log::info!("Started prettier in {prettier_dir:?}"); + } + if let Some(prettier_server) = prettier.server() { + prettier_store + .update(cx, |prettier_store, cx| { + let name = if is_default { + LanguageServerName(Arc::from("prettier (default)")) + } else { + let worktree_path = worktree_id + .and_then(|id| { + prettier_store + .worktree_store + .read(cx) + .worktree_for_id(id, cx) + }) + .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path())); + let name = match worktree_path { + Some(worktree_path) => { + if prettier_dir == worktree_path.as_ref() { + let name = prettier_dir + .file_name() + .and_then(|name| name.to_str()) + .unwrap_or_default(); + format!("prettier ({name})") + } else { + let dir_to_display = prettier_dir + .strip_prefix(worktree_path.as_ref()) + .ok() + .unwrap_or(prettier_dir); + format!("prettier ({})", dir_to_display.display()) + } } - } - None => format!("prettier ({})", prettier_dir.display()), + None => format!("prettier ({})", prettier_dir.display()), + }; + LanguageServerName(Arc::from(name)) }; - LanguageServerName(Arc::from(name)) - }; - project.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.register_supplementary_language_server( + cx.emit(PrettierStoreEvent::LanguageServerAdded { new_server_id, name, - Arc::clone(prettier_server), - cx, - ) - }); - }) - .ok(); - } -} - -async fn install_prettier_packages( - fs: &dyn Fs, - plugins_to_install: HashSet>, - node: Arc, -) -> anyhow::Result<()> { - let packages_to_versions = future::try_join_all( - plugins_to_install - .iter() - .chain(Some(&"prettier".into())) - .map(|package_name| async { - let returned_package_name = package_name.to_string(); - let latest_version = node - .npm_package_latest_version(package_name) - .await - .with_context(|| { - format!("fetching latest npm version for package 
{returned_package_name}") - })?; - anyhow::Ok((returned_package_name, latest_version)) - }), - ) - .await - .context("fetching latest npm versions")?; - - let default_prettier_dir = default_prettier_dir().as_path(); - match fs.metadata(default_prettier_dir).await.with_context(|| { - format!("fetching FS metadata for default prettier dir {default_prettier_dir:?}") - })? { - Some(prettier_dir_metadata) => anyhow::ensure!( - prettier_dir_metadata.is_dir, - "default prettier dir {default_prettier_dir:?} is not a directory" - ), - None => fs - .create_dir(default_prettier_dir) - .await - .with_context(|| format!("creating default prettier dir {default_prettier_dir:?}"))?, - } - - log::info!("Installing default prettier and plugins: {packages_to_versions:?}"); - let borrowed_packages = packages_to_versions - .iter() - .map(|(package, version)| (package.as_str(), version.as_str())) - .collect::>(); - node.npm_install_packages(default_prettier_dir, &borrowed_packages) - .await - .context("fetching formatter packages")?; - anyhow::Ok(()) -} - -async fn save_prettier_server_file(fs: &dyn Fs) -> anyhow::Result<()> { - let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); - fs.save( - &prettier_wrapper_path, - &text::Rope::from(prettier::PRETTIER_SERVER_JS), - text::LineEnding::Unix, - ) - .await - .with_context(|| { - format!( - "writing {} file at {prettier_wrapper_path:?}", - prettier::PRETTIER_SERVER_FILE - ) - })?; - Ok(()) -} - -async fn should_write_prettier_server_file(fs: &dyn Fs) -> bool { - let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); - if !fs.is_file(&prettier_wrapper_path).await { - return true; + prettier_server: prettier_server.clone(), + }); + }) + .ok(); + } } - let Ok(prettier_server_file_contents) = fs.load(&prettier_wrapper_path).await else { - return true; - }; - prettier_server_file_contents != prettier::PRETTIER_SERVER_JS -} -impl Project { pub fn update_prettier_settings( 
&self, worktree: &Model, changes: &[(Arc, ProjectEntryId, PathChange)], - cx: &mut ModelContext<'_, Project>, + cx: &mut ModelContext, ) { let prettier_config_files = Prettier::CONFIG_FILE_NAMES .iter() @@ -510,122 +450,6 @@ impl Project { } } - fn prettier_instance_for_buffer( - &mut self, - buffer: &Model, - cx: &mut ModelContext, - ) -> Task, PrettierTask)>> { - // todo(ssh remote): prettier support - if self.is_via_collab() || self.ssh_session.is_some() { - return Task::ready(None); - } - let buffer = buffer.read(cx); - let buffer_file = buffer.file(); - if buffer.language().is_none() { - return Task::ready(None); - } - let Some(node) = self.node.clone() else { - return Task::ready(None); - }; - match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx))) { - Some((worktree_id, buffer_path)) => { - let fs = Arc::clone(&self.fs); - let installed_prettiers = self.prettier_instances.keys().cloned().collect(); - cx.spawn(|project, mut cx| async move { - match cx - .background_executor() - .spawn(async move { - Prettier::locate_prettier_installation( - fs.as_ref(), - &installed_prettiers, - &buffer_path, - ) - .await - }) - .await - { - Ok(ControlFlow::Break(())) => None, - Ok(ControlFlow::Continue(None)) => { - let default_instance = project - .update(&mut cx, |project, cx| { - project - .prettiers_per_worktree - .entry(worktree_id) - .or_default() - .insert(None); - project.default_prettier.prettier_task( - &node, - Some(worktree_id), - cx, - ) - }) - .ok()?; - Some((None, default_instance?.log_err().await?)) - } - Ok(ControlFlow::Continue(Some(prettier_dir))) => { - project - .update(&mut cx, |project, _| { - project - .prettiers_per_worktree - .entry(worktree_id) - .or_default() - .insert(Some(prettier_dir.clone())) - }) - .ok()?; - if let Some(prettier_task) = project - .update(&mut cx, |project, cx| { - project.prettier_instances.get_mut(&prettier_dir).map( - |existing_instance| { - existing_instance.prettier_task( - &node, - 
Some(&prettier_dir), - Some(worktree_id), - cx, - ) - }, - ) - }) - .ok()? - { - log::debug!("Found already started prettier in {prettier_dir:?}"); - return Some((Some(prettier_dir), prettier_task?.await.log_err()?)); - } - - log::info!("Found prettier in {prettier_dir:?}, starting."); - let new_prettier_task = project - .update(&mut cx, |project, cx| { - let new_prettier_task = start_prettier( - node, - prettier_dir.clone(), - Some(worktree_id), - cx, - ); - project.prettier_instances.insert( - prettier_dir.clone(), - PrettierInstance { - attempt: 0, - prettier: Some(new_prettier_task.clone()), - }, - ); - new_prettier_task - }) - .ok()?; - Some((Some(prettier_dir), new_prettier_task)) - } - Err(e) => { - log::error!("Failed to determine prettier path for buffer: {e:#}"); - None - } - } - }) - } - None => { - let new_task = self.default_prettier.prettier_task(&node, None, cx); - cx.spawn(|_, _| async move { Some((None, new_task?.log_err().await?)) }) - } - } - } - pub fn install_default_prettier( &mut self, worktree: Option, @@ -642,12 +466,13 @@ impl Project { } let mut new_plugins = plugins.collect::>(); - let Some(node) = self.node.as_ref().cloned() else { - return; - }; + let node = self.node.clone(); + let fs = Arc::clone(&self.fs); let locate_prettier_installation = match worktree.and_then(|worktree_id| { - self.worktree_for_id(worktree_id, cx) + self.worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) .map(|worktree| worktree.read(cx).abs_path()) }) { Some(locate_from) => { @@ -777,4 +602,291 @@ impl Project { not_installed_plugins: plugins_to_install, }; } + + pub fn on_settings_changed( + &mut self, + language_formatters_to_check: Vec<(Option, LanguageSettings)>, + cx: &mut ModelContext, + ) { + let mut prettier_plugins_by_worktree = HashMap::default(); + for (worktree, language_settings) in language_formatters_to_check { + if let Some(plugins) = prettier_plugins_for_language(&language_settings) { + prettier_plugins_by_worktree + 
.entry(worktree) + .or_insert_with(HashSet::default) + .extend(plugins.iter().cloned()); + } + } + for (worktree, prettier_plugins) in prettier_plugins_by_worktree { + self.install_default_prettier( + worktree, + prettier_plugins.into_iter().map(Arc::from), + cx, + ); + } + } +} + +pub fn prettier_plugins_for_language( + language_settings: &LanguageSettings, +) -> Option<&HashSet> { + match &language_settings.formatter { + SelectedFormatter::Auto => Some(&language_settings.prettier.plugins), + + SelectedFormatter::List(list) => list + .as_ref() + .contains(&Formatter::Prettier) + .then_some(&language_settings.prettier.plugins), + } +} + +pub(super) async fn format_with_prettier( + prettier_store: &WeakModel, + buffer: &Model, + cx: &mut AsyncAppContext, +) -> Option> { + let prettier_instance = prettier_store + .update(cx, |prettier_store, cx| { + prettier_store.prettier_instance_for_buffer(buffer, cx) + }) + .ok()? + .await; + + let (prettier_path, prettier_task) = prettier_instance?; + + let prettier_description = match prettier_path.as_ref() { + Some(path) => format!("prettier at {path:?}"), + None => "default prettier instance".to_string(), + }; + + match prettier_task.await { + Ok(prettier) => { + let buffer_path = buffer + .update(cx, |buffer, cx| { + File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) + }) + .ok() + .flatten(); + + let format_result = prettier + .format(buffer, buffer_path, cx) + .await + .map(FormatOperation::Prettier) + .with_context(|| format!("{} failed to format buffer", prettier_description)); + + Some(format_result) + } + Err(error) => { + prettier_store + .update(cx, |project, _| { + let instance_to_update = match prettier_path { + Some(prettier_path) => project.prettier_instances.get_mut(&prettier_path), + None => match &mut project.default_prettier.prettier { + PrettierInstallation::NotInstalled { .. 
} => None, + PrettierInstallation::Installed(instance) => Some(instance), + }, + }; + + if let Some(instance) = instance_to_update { + instance.attempt += 1; + instance.prettier = None; + } + }) + .log_err(); + + Some(Err(anyhow!( + "{} failed to spawn: {error:#}", + prettier_description + ))) + } + } +} + +pub struct DefaultPrettier { + prettier: PrettierInstallation, + installed_plugins: HashSet>, +} + +#[derive(Debug)] +pub enum PrettierInstallation { + NotInstalled { + attempts: usize, + installation_task: Option>>>>, + not_installed_plugins: HashSet>, + }, + Installed(PrettierInstance), +} + +pub type PrettierTask = Shared, Arc>>>; + +#[derive(Debug, Clone)] +pub struct PrettierInstance { + attempt: usize, + prettier: Option, +} + +impl Default for DefaultPrettier { + fn default() -> Self { + Self { + prettier: PrettierInstallation::NotInstalled { + attempts: 0, + installation_task: None, + not_installed_plugins: HashSet::default(), + }, + installed_plugins: HashSet::default(), + } + } +} + +impl DefaultPrettier { + pub fn instance(&self) -> Option<&PrettierInstance> { + if let PrettierInstallation::Installed(instance) = &self.prettier { + Some(instance) + } else { + None + } + } + + pub fn prettier_task( + &mut self, + node: &Arc, + worktree_id: Option, + cx: &mut ModelContext, + ) -> Option>> { + match &mut self.prettier { + PrettierInstallation::NotInstalled { .. 
} => Some( + PrettierStore::start_default_prettier(node.clone(), worktree_id, cx), + ), + PrettierInstallation::Installed(existing_instance) => { + existing_instance.prettier_task(node, None, worktree_id, cx) + } + } + } +} + +impl PrettierInstance { + pub fn prettier_task( + &mut self, + node: &Arc, + prettier_dir: Option<&Path>, + worktree_id: Option, + cx: &mut ModelContext, + ) -> Option>> { + if self.attempt > prettier::FAIL_THRESHOLD { + match prettier_dir { + Some(prettier_dir) => log::warn!( + "Prettier from path {prettier_dir:?} exceeded launch threshold, not starting" + ), + None => log::warn!("Default prettier exceeded launch threshold, not starting"), + } + return None; + } + Some(match &self.prettier { + Some(prettier_task) => Task::ready(Ok(prettier_task.clone())), + None => match prettier_dir { + Some(prettier_dir) => { + let new_task = PrettierStore::start_prettier( + Arc::clone(node), + prettier_dir.to_path_buf(), + worktree_id, + cx, + ); + self.attempt += 1; + self.prettier = Some(new_task.clone()); + Task::ready(Ok(new_task)) + } + None => { + self.attempt += 1; + let node = Arc::clone(node); + cx.spawn(|prettier_store, mut cx| async move { + prettier_store + .update(&mut cx, |_, cx| { + PrettierStore::start_default_prettier(node, worktree_id, cx) + })? 
+ .await + }) + } + }, + }) + } + + pub async fn server(&self) -> Option> { + self.prettier.clone()?.await.ok()?.server().cloned() + } +} + +async fn install_prettier_packages( + fs: &dyn Fs, + plugins_to_install: HashSet>, + node: Arc, +) -> anyhow::Result<()> { + let packages_to_versions = future::try_join_all( + plugins_to_install + .iter() + .chain(Some(&"prettier".into())) + .map(|package_name| async { + let returned_package_name = package_name.to_string(); + let latest_version = node + .npm_package_latest_version(package_name) + .await + .with_context(|| { + format!("fetching latest npm version for package {returned_package_name}") + })?; + anyhow::Ok((returned_package_name, latest_version)) + }), + ) + .await + .context("fetching latest npm versions")?; + + let default_prettier_dir = default_prettier_dir().as_path(); + match fs.metadata(default_prettier_dir).await.with_context(|| { + format!("fetching FS metadata for default prettier dir {default_prettier_dir:?}") + })? { + Some(prettier_dir_metadata) => anyhow::ensure!( + prettier_dir_metadata.is_dir, + "default prettier dir {default_prettier_dir:?} is not a directory" + ), + None => fs + .create_dir(default_prettier_dir) + .await + .with_context(|| format!("creating default prettier dir {default_prettier_dir:?}"))?, + } + + log::info!("Installing default prettier and plugins: {packages_to_versions:?}"); + let borrowed_packages = packages_to_versions + .iter() + .map(|(package, version)| (package.as_str(), version.as_str())) + .collect::>(); + node.npm_install_packages(default_prettier_dir, &borrowed_packages) + .await + .context("fetching formatter packages")?; + anyhow::Ok(()) +} + +async fn save_prettier_server_file(fs: &dyn Fs) -> anyhow::Result<()> { + let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); + fs.save( + &prettier_wrapper_path, + &text::Rope::from(prettier::PRETTIER_SERVER_JS), + text::LineEnding::Unix, + ) + .await + .with_context(|| { + format!( + 
"writing {} file at {prettier_wrapper_path:?}", + prettier::PRETTIER_SERVER_FILE + ) + })?; + Ok(()) +} + +async fn should_write_prettier_server_file(fs: &dyn Fs) -> bool { + let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); + if !fs.is_file(&prettier_wrapper_path).await { + return true; + } + let Ok(prettier_server_file_contents) = fs.load(&prettier_wrapper_path).await else { + return true; + }; + prettier_server_file_contents != prettier::PRETTIER_SERVER_JS } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 4318737e3875b8..f4816cf0cde66f 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4,7 +4,7 @@ pub mod debounced_delay; pub mod lsp_command; pub mod lsp_ext_command; pub mod lsp_store; -mod prettier_support; +pub mod prettier_store; pub mod project_settings; pub mod search; mod task_inventory; @@ -31,7 +31,6 @@ pub use environment::ProjectEnvironment; use futures::{ channel::mpsc::{self, UnboundedReceiver}, future::try_join_all, - stream::FuturesUnordered, AsyncWriteExt, FutureExt, StreamExt, }; @@ -59,8 +58,8 @@ use lsp_command::*; use node_runtime::NodeRuntime; use parking_lot::{Mutex, RwLock}; use paths::{local_tasks_file_relative_path, local_vscode_tasks_file_relative_path}; -use prettier_support::{DefaultPrettier, PrettierInstance}; -use project_settings::{LspSettings, ProjectSettings, SettingsObserver}; +pub use prettier_store::PrettierStore; +use project_settings::{ProjectSettings, SettingsObserver}; use remote::SshSession; use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient, ErrorCode}; use search::{SearchInputKind, SearchQuery, SearchResult}; @@ -140,7 +139,6 @@ pub struct Project { buffer_ordered_messages_tx: mpsc::UnboundedSender, languages: Arc, client: Arc, - current_lsp_settings: HashMap, LspSettings>, join_project_response_message_id: u32, user_store: Model, fs: Arc, @@ -157,9 +155,6 @@ pub struct Project { remotely_created_buffers: Arc>, terminals: 
Terminals, node: Option>, - default_prettier: DefaultPrettier, - prettiers_per_worktree: HashMap>>, - prettier_instances: HashMap, tasks: Model, hosted_project_id: Option, dev_server_project_id: Option, @@ -634,6 +629,16 @@ impl Project { cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); + let prettier_store = cx.new_model(|cx| { + PrettierStore::new( + node.clone(), + fs.clone(), + languages.clone(), + worktree_store.clone(), + cx, + ) + }); + let settings_observer = cx.new_model(|cx| { SettingsObserver::new_local(fs.clone(), worktree_store.clone(), cx) }); @@ -643,6 +648,7 @@ impl Project { LspStore::new_local( buffer_store.clone(), worktree_store.clone(), + prettier_store.clone(), environment.clone(), languages.clone(), Some(client.http_client()), @@ -658,14 +664,10 @@ impl Project { worktree_store, buffer_store, lsp_store, - current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), join_project_response_message_id: 0, client_state: ProjectClientState::Local, client_subscriptions: Vec::new(), - _subscriptions: vec![ - cx.observe_global::(Self::on_settings_changed), - cx.on_release(Self::release), - ], + _subscriptions: vec![cx.on_release(Self::release)], active_entry: None, snippets, languages, @@ -680,9 +682,6 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - default_prettier: DefaultPrettier::default(), - prettiers_per_worktree: HashMap::default(), - prettier_instances: HashMap::default(), tasks, hosted_project_id: None, dev_server_project_id: None, @@ -751,14 +750,10 @@ impl Project { worktree_store, buffer_store, lsp_store, - current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), join_project_response_message_id: 0, client_state: ProjectClientState::Local, client_subscriptions: Vec::new(), - _subscriptions: vec![ - cx.observe_global::(Self::on_settings_changed), - cx.on_release(Self::release), - ], + _subscriptions: vec![cx.on_release(Self::release)], active_entry: None, snippets, languages, @@ -773,9 
+768,6 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - default_prettier: DefaultPrettier::default(), - prettiers_per_worktree: HashMap::default(), - prettier_instances: HashMap::default(), tasks, hosted_project_id: None, dev_server_project_id: None, @@ -928,7 +920,6 @@ impl Project { buffer_store: buffer_store.clone(), worktree_store: worktree_store.clone(), lsp_store: lsp_store.clone(), - current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), active_entry: None, collaborators: Default::default(), join_project_response_message_id: response.message_id, @@ -954,9 +945,6 @@ impl Project { local_handles: Vec::new(), }, node: None, - default_prettier: DefaultPrettier::default(), - prettiers_per_worktree: HashMap::default(), - prettier_instances: HashMap::default(), tasks, hosted_project_id: None, dev_server_project_id: response @@ -1176,112 +1164,6 @@ impl Project { self.worktree_store.clone() } - fn on_settings_changed(&mut self, cx: &mut ModelContext) { - let mut language_servers_to_start = Vec::new(); - let mut language_formatters_to_check = Vec::new(); - for buffer in self.buffer_store.read(cx).buffers() { - let buffer = buffer.read(cx); - let buffer_file = File::from_dyn(buffer.file()); - let buffer_language = buffer.language(); - let settings = language_settings(buffer_language, buffer.file(), cx); - if let Some(language) = buffer_language { - if settings.enable_language_server { - if let Some(file) = buffer_file { - language_servers_to_start.push((file.worktree.clone(), language.name())); - } - } - language_formatters_to_check - .push((buffer_file.map(|f| f.worktree_id(cx)), settings.clone())); - } - } - - let mut language_servers_to_stop = Vec::new(); - let mut language_servers_to_restart = Vec::new(); - let languages = self.languages.to_vec(); - - let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone(); - let current_lsp_settings = &self.current_lsp_settings; - for (worktree_id, started_lsp_name) in 
self.lsp_store.read(cx).started_language_servers() { - let language = languages.iter().find_map(|l| { - let adapter = self - .languages - .lsp_adapters(&l.name()) - .iter() - .find(|adapter| adapter.name == started_lsp_name)? - .clone(); - Some((l, adapter)) - }); - if let Some((language, adapter)) = language { - let worktree = self.worktree_for_id(worktree_id, cx); - let file = worktree.as_ref().and_then(|tree| { - tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _)) - }); - if !language_settings(Some(language), file.as_ref(), cx).enable_language_server { - language_servers_to_stop.push((worktree_id, started_lsp_name.clone())); - } else if let Some(worktree) = worktree { - let server_name = &adapter.name.0; - match ( - current_lsp_settings.get(server_name), - new_lsp_settings.get(server_name), - ) { - (None, None) => {} - (Some(_), None) | (None, Some(_)) => { - language_servers_to_restart.push((worktree, language.name())); - } - (Some(current_lsp_settings), Some(new_lsp_settings)) => { - if current_lsp_settings != new_lsp_settings { - language_servers_to_restart.push((worktree, language.name())); - } - } - } - } - } - } - self.current_lsp_settings = new_lsp_settings; - - // Stop all newly-disabled language servers. 
- self.lsp_store.update(cx, |lsp_store, cx| { - for (worktree_id, adapter_name) in language_servers_to_stop { - lsp_store - .stop_language_server(worktree_id, adapter_name, cx) - .detach(); - } - }); - - let mut prettier_plugins_by_worktree = HashMap::default(); - for (worktree, language_settings) in language_formatters_to_check { - if let Some(plugins) = - prettier_support::prettier_plugins_for_language(&language_settings) - { - prettier_plugins_by_worktree - .entry(worktree) - .or_insert_with(HashSet::default) - .extend(plugins.iter().cloned()); - } - } - for (worktree, prettier_plugins) in prettier_plugins_by_worktree { - self.install_default_prettier( - worktree, - prettier_plugins.into_iter().map(Arc::from), - cx, - ); - } - - // Start all the newly-enabled language servers. - self.lsp_store.update(cx, |lsp_store, cx| { - for (worktree, language) in language_servers_to_start { - lsp_store.start_language_servers(&worktree, language, cx); - } - - // Restart all language servers with changed initialization options. 
- for (worktree, language) in language_servers_to_restart { - lsp_store.restart_language_servers(worktree, language, cx); - } - }); - - cx.notify(); - } - pub fn buffer_for_id(&self, remote_id: BufferId, cx: &AppContext) -> Option> { self.buffer_store.read(cx).get(remote_id) } @@ -2160,24 +2042,10 @@ impl Project { buffer, new_language, } => { - let Some(new_language) = new_language else { + let Some(_) = new_language else { cx.emit(Event::LanguageNotFound(buffer.clone())); return; }; - let buffer_file = buffer.read(cx).file().cloned(); - let settings = - language_settings(Some(new_language), buffer_file.as_ref(), cx).clone(); - let buffer_file = File::from_dyn(buffer_file.as_ref()); - let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx)); - if let Some(prettier_plugins) = - prettier_support::prettier_plugins_for_language(&settings) - { - self.install_default_prettier( - worktree, - prettier_plugins.iter().map(|s| Arc::from(s.as_str())), - cx, - ); - }; } LspStoreEvent::RefreshInlayHints => cx.emit(Event::RefreshInlayHints), LspStoreEvent::LanguageServerPrompt(prompt) => { @@ -2253,7 +2121,6 @@ impl Project { worktree::Event::UpdatedEntries(changes) => { if is_local { this.update_local_worktree_settings(&worktree, changes, cx); - this.update_prettier_settings(&worktree, changes, cx); } cx.emit(Event::WorktreeUpdatedEntries( @@ -2300,37 +2167,6 @@ impl Project { return; } - let mut prettier_instances_to_clean = FuturesUnordered::new(); - if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) { - for path in prettier_paths.iter().flatten() { - if let Some(prettier_instance) = self.prettier_instances.remove(path) { - prettier_instances_to_clean.push(async move { - prettier_instance - .server() - .await - .map(|server| server.server_id()) - }); - } - } - } - cx.spawn(|project, mut cx| async move { - while let Some(prettier_server_id) = prettier_instances_to_clean.next().await { - if let Some(prettier_server_id) = prettier_server_id { - 
project - .update(&mut cx, |project, cx| { - project.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.unregister_supplementary_language_server( - prettier_server_id, - cx, - ); - }); - }) - .ok(); - } - } - }) - .detach(); - self.task_inventory().update(cx, |inventory, _| { inventory.remove_worktree_sources(id_to_remove); }); @@ -3059,11 +2895,21 @@ impl Project { None } } - Formatter::Prettier => prettier_support::format_with_prettier(&project, buffer, cx) - .await - .transpose() - .ok() - .flatten(), + Formatter::Prettier => { + let prettier = project.update(cx, |project, cx| { + project + .lsp_store + .read(cx) + .prettier_store() + .unwrap() + .downgrade() + })?; + prettier_store::format_with_prettier(&prettier, buffer, cx) + .await + .transpose() + .ok() + .flatten() + } Formatter::External { command, arguments } => { let buffer_abs_path = buffer_abs_path.as_ref().map(|path| path.as_path()); Self::format_via_external_command(buffer, buffer_abs_path, command, arguments, cx) diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index f5efa21bd0fb6a..ed12b41167cc23 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -26,6 +26,7 @@ env_logger.workspace = true fs.workspace = true futures.workspace = true gpui.workspace = true +node_runtime.workspace = true log.workspace = true project.workspace = true remote.workspace = true diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 35d6630c1e11be..ec26bddfc3e7ab 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -2,12 +2,13 @@ use anyhow::{anyhow, Result}; use fs::Fs; use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext}; use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry}; +use node_runtime::DummyNodeRuntime; use project::{ buffer_store::{BufferStore, BufferStoreEvent}, 
project_settings::SettingsObserver, search::SearchQuery, worktree_store::WorktreeStore, - LspStore, LspStoreEvent, ProjectPath, WorktreeId, + LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId, }; use remote::SshSession; use rpc::{ @@ -54,6 +55,16 @@ impl HeadlessProject { buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx); buffer_store }); + let prettier_store = cx.new_model(|cx| { + PrettierStore::new( + DummyNodeRuntime::new(), + fs.clone(), + languages.clone(), + worktree_store.clone(), + cx, + ) + }); + let settings_observer = cx.new_model(|cx| { let mut observer = SettingsObserver::new_local(fs.clone(), worktree_store.clone(), cx); observer.shared(SSH_PROJECT_ID, session.clone().into(), cx); @@ -64,6 +75,7 @@ impl HeadlessProject { let mut lsp_store = LspStore::new_local( buffer_store.clone(), worktree_store.clone(), + prettier_store.clone(), environment, languages.clone(), None, From 56f9e4c7b3834826c01a75f0a883e594a8482d90 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 17 Sep 2024 15:39:44 -0700 Subject: [PATCH 166/762] Remove visible 'TBD' from docs (#17979) Release Notes: - N/A --- docs/src/extensions/languages.md | 2 ++ docs/src/key-bindings.md | 2 ++ docs/src/languages/javascript.md | 2 ++ 3 files changed, 6 insertions(+) diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index 90de3f658d831a..c00328530322b2 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -284,7 +284,9 @@ The `@run` capture specifies where the run button should appear in the editor. O | @run | Captures the script name | | @script | Also captures the script name (for different purposes) | + ## Language Servers diff --git a/docs/src/key-bindings.md b/docs/src/key-bindings.md index 989e101e7d9fbd..b35c894071bbf1 100644 --- a/docs/src/key-bindings.md +++ b/docs/src/key-bindings.md @@ -177,8 +177,10 @@ See the [tasks documentation](tasks.md#custom-keybindings-for-tasks) for more. 
#### Global + | **Command** | **Target** | **Default Shortcut** | | ------------------------- | ------------ | ----------------------- | diff --git a/docs/src/languages/javascript.md b/docs/src/languages/javascript.md index 0e642527e08f46..8fb84881ada0f8 100644 --- a/docs/src/languages/javascript.md +++ b/docs/src/languages/javascript.md @@ -26,6 +26,7 @@ For example, if you have Prettier installed and on your `PATH`, you can use it t } ``` + ## ESLint From fbb402ef12b0e61bc02db7a1715d0a36df1b94a7 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Wed, 18 Sep 2024 06:45:08 +0800 Subject: [PATCH 167/762] windows: Remove the use of `DispatcherQueue` and fix `FileSaveDialog` unresponsive issue (#17946) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #17069, closes #12410 With the help of @kennykerr (Creator of C++/WinRT and the crate `windows-rs`, Engineer on the Windows team at Microsoft) and @riverar (Windows Development expert), we discovered that this bug only occurs when an IME with a candidate window, such as Microsoft Pinyin IME, is active. In this case, the `FileSaveDialog` becomes unresponsive—while the dialog itself appears to be functioning, it doesn't accept any mouse or keyboard input. After a period of debugging and testing, I found that this issue only arises when using `DispatcherQueue` to dispatch runnables on the UI thread. After @kennykerr’s further investigation, Kenny identified that this is a bug with `DispatcherQueue`, and he recommended to avoid using `DispatcherQueue`. Given the uncertainty about whether Microsoft will address this bug in the foreseeable future, I have removed the use of `DispatcherQueue`. 
Co-authored-by: Kenny Release Notes: - N/A --------- Co-authored-by: Kenny --- Cargo.toml | 2 +- crates/gpui/Cargo.toml | 4 +- .../gpui/src/platform/windows/dispatcher.rs | 57 ++++-------- crates/gpui/src/platform/windows/events.rs | 3 + crates/gpui/src/platform/windows/platform.rs | 90 ++++++++++++++----- crates/gpui/src/platform/windows/window.rs | 19 ++-- 6 files changed, 104 insertions(+), 71 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 0b392e02eb7b36..ec3138179b1c5a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -490,7 +490,6 @@ features = [ "implement", "Foundation_Numerics", "Storage", - "System", "System_Threading", "UI_ViewManagement", "Wdk_System_SystemServices", @@ -521,6 +520,7 @@ features = [ "Win32_UI_Input_Ime", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", + "Win32_UI_Shell_Common", "Win32_UI_WindowsAndMessaging", ] diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 09b546fc3266d2..d0d75b73e97a7d 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -50,7 +50,7 @@ parking = "2.0.0" parking_lot.workspace = true postage.workspace = true profiling.workspace = true -rand = { optional = true, workspace = true} +rand = { optional = true, workspace = true } raw-window-handle = "0.6" refineable.workspace = true resvg = { version = "0.41.0", default-features = false } @@ -110,6 +110,7 @@ blade-graphics.workspace = true blade-macros.workspace = true blade-util.workspace = true bytemuck = "1" +flume = "0.11" [target.'cfg(target_os = "linux")'.dependencies] as-raw-xcb-connection = "1" @@ -117,7 +118,6 @@ ashpd.workspace = true calloop = "0.13.0" calloop-wayland-source = "0.3.0" cosmic-text = { git = "https://github.com/pop-os/cosmic-text", rev = "542b20c" } -flume = "0.11" wayland-backend = { version = "0.3.3", features = ["client_system", "dlopen"] } wayland-client = { version = "0.31.2" } wayland-cursor = "0.31.1" diff --git a/crates/gpui/src/platform/windows/dispatcher.rs 
b/crates/gpui/src/platform/windows/dispatcher.rs index abe40d2c2e34db..575e844051d9b7 100644 --- a/crates/gpui/src/platform/windows/dispatcher.rs +++ b/crates/gpui/src/platform/windows/dispatcher.rs @@ -3,51 +3,39 @@ use std::{ time::Duration, }; +use anyhow::Context; use async_task::Runnable; +use flume::Sender; use parking::Parker; use parking_lot::Mutex; use util::ResultExt; use windows::{ Foundation::TimeSpan, - System::{ - DispatcherQueue, DispatcherQueueController, DispatcherQueueHandler, - Threading::{ - ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemOptions, - WorkItemPriority, - }, - }, - Win32::System::WinRT::{ - CreateDispatcherQueueController, DispatcherQueueOptions, DQTAT_COM_NONE, - DQTYPE_THREAD_CURRENT, + System::Threading::{ + ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemOptions, + WorkItemPriority, }, + Win32::{Foundation::HANDLE, System::Threading::SetEvent}, }; -use crate::{PlatformDispatcher, TaskLabel}; +use crate::{PlatformDispatcher, SafeHandle, TaskLabel}; pub(crate) struct WindowsDispatcher { - controller: DispatcherQueueController, - main_queue: DispatcherQueue, + main_sender: Sender, + dispatch_event: SafeHandle, parker: Mutex, main_thread_id: ThreadId, } impl WindowsDispatcher { - pub(crate) fn new() -> Self { - let controller = unsafe { - let options = DispatcherQueueOptions { - dwSize: std::mem::size_of::() as u32, - threadType: DQTYPE_THREAD_CURRENT, - apartmentType: DQTAT_COM_NONE, - }; - CreateDispatcherQueueController(options).unwrap() - }; - let main_queue = controller.DispatcherQueue().unwrap(); + pub(crate) fn new(main_sender: Sender, dispatch_event: HANDLE) -> Self { + let dispatch_event = dispatch_event.into(); let parker = Mutex::new(Parker::new()); let main_thread_id = current().id(); WindowsDispatcher { - controller, - main_queue, + main_sender, + dispatch_event, parker, main_thread_id, } @@ -86,12 +74,6 @@ impl WindowsDispatcher { } } -impl Drop for 
WindowsDispatcher { - fn drop(&mut self) { - self.controller.ShutdownQueueAsync().log_err(); - } -} - impl PlatformDispatcher for WindowsDispatcher { fn is_main_thread(&self) -> bool { current().id() == self.main_thread_id @@ -105,14 +87,11 @@ impl PlatformDispatcher for WindowsDispatcher { } fn dispatch_on_main_thread(&self, runnable: Runnable) { - let handler = { - let mut task_wrapper = Some(runnable); - DispatcherQueueHandler::new(move || { - task_wrapper.take().unwrap().run(); - Ok(()) - }) - }; - self.main_queue.TryEnqueue(&handler).log_err(); + self.main_sender + .send(runnable) + .context("Dispatch on main thread failed") + .log_err(); + unsafe { SetEvent(*self.dispatch_event).log_err() }; } fn dispatch_after(&self, duration: Duration, runnable: Runnable) { diff --git a/crates/gpui/src/platform/windows/events.rs b/crates/gpui/src/platform/windows/events.rs index 0d55142ae9f338..b62f51f6d9b94e 100644 --- a/crates/gpui/src/platform/windows/events.rs +++ b/crates/gpui/src/platform/windows/events.rs @@ -177,6 +177,9 @@ fn handle_timer_msg( state_ptr: Rc, ) -> Option { if wparam.0 == SIZE_MOVE_LOOP_TIMER_ID { + for runnable in state_ptr.main_receiver.drain() { + runnable.run(); + } handle_paint_msg(handle, state_ptr) } else { None diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 934d9336d2f6c2..d9f08c2247adf0 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -8,6 +8,7 @@ use std::{ use ::util::ResultExt; use anyhow::{anyhow, Context, Result}; +use async_task::Runnable; use futures::channel::oneshot::{self, Receiver}; use itertools::Itertools; use parking_lot::RwLock; @@ -46,6 +47,8 @@ pub(crate) struct WindowsPlatform { raw_window_handles: RwLock>, // The below members will never change throughout the entire lifecycle of the app. 
icon: HICON, + main_receiver: flume::Receiver, + dispatch_event: HANDLE, background_executor: BackgroundExecutor, foreground_executor: ForegroundExecutor, text_system: Arc, @@ -89,7 +92,9 @@ impl WindowsPlatform { unsafe { OleInitialize(None).expect("unable to initialize Windows OLE"); } - let dispatcher = Arc::new(WindowsDispatcher::new()); + let (main_sender, main_receiver) = flume::unbounded::(); + let dispatch_event = unsafe { CreateEventW(None, false, false, None) }.unwrap(); + let dispatcher = Arc::new(WindowsDispatcher::new(main_sender, dispatch_event)); let background_executor = BackgroundExecutor::new(dispatcher.clone()); let foreground_executor = ForegroundExecutor::new(dispatcher); let bitmap_factory = ManuallyDrop::new(unsafe { @@ -113,6 +118,8 @@ impl WindowsPlatform { state, raw_window_handles, icon, + main_receiver, + dispatch_event, background_executor, foreground_executor, text_system, @@ -176,6 +183,24 @@ impl WindowsPlatform { lock.is_empty() } + + #[inline] + fn run_foreground_tasks(&self) { + for runnable in self.main_receiver.drain() { + runnable.run(); + } + } + + fn generate_creation_info(&self) -> WindowCreationInfo { + WindowCreationInfo { + icon: self.icon, + executor: self.foreground_executor.clone(), + current_cursor: self.state.borrow().current_cursor, + windows_version: self.windows_version, + validation_number: self.validation_number, + main_receiver: self.main_receiver.clone(), + } + } } impl Platform for WindowsPlatform { @@ -197,16 +222,21 @@ impl Platform for WindowsPlatform { begin_vsync(*vsync_event); 'a: loop { let wait_result = unsafe { - MsgWaitForMultipleObjects(Some(&[*vsync_event]), false, INFINITE, QS_ALLINPUT) + MsgWaitForMultipleObjects( + Some(&[*vsync_event, self.dispatch_event]), + false, + INFINITE, + QS_ALLINPUT, + ) }; match wait_result { // compositor clock ticked so we should draw a frame - WAIT_EVENT(0) => { - self.redraw_all(); - } + WAIT_EVENT(0) => self.redraw_all(), + // foreground tasks are dispatched + 
WAIT_EVENT(1) => self.run_foreground_tasks(), // Windows thread messages are posted - WAIT_EVENT(1) => { + WAIT_EVENT(2) => { let mut msg = MSG::default(); unsafe { while PeekMessageW(&mut msg, None, 0, 0, PM_REMOVE).as_bool() { @@ -230,6 +260,8 @@ impl Platform for WindowsPlatform { } } } + // foreground tasks may have been queued in the message handlers + self.run_foreground_tasks(); } _ => { log::error!("Something went wrong while waiting {:?}", wait_result); @@ -319,17 +351,7 @@ impl Platform for WindowsPlatform { handle: AnyWindowHandle, options: WindowParams, ) -> Result> { - let lock = self.state.borrow(); - let window = WindowsWindow::new( - handle, - options, - self.icon, - self.foreground_executor.clone(), - lock.current_cursor, - self.windows_version, - self.validation_number, - )?; - drop(lock); + let window = WindowsWindow::new(handle, options, self.generate_creation_info())?; let handle = window.get_raw_handle(); self.raw_window_handles.write().push(handle); @@ -558,6 +580,15 @@ impl Drop for WindowsPlatform { } } +pub(crate) struct WindowCreationInfo { + pub(crate) icon: HICON, + pub(crate) executor: ForegroundExecutor, + pub(crate) current_cursor: HCURSOR, + pub(crate) windows_version: WindowsVersion, + pub(crate) validation_number: usize, + pub(crate) main_receiver: flume::Receiver, +} + fn open_target(target: &str) { unsafe { let ret = ShellExecuteW( @@ -631,22 +662,33 @@ fn file_open_dialog(options: PathPromptOptions) -> Result>> fn file_save_dialog(directory: PathBuf) -> Result> { let dialog: IFileSaveDialog = unsafe { CoCreateInstance(&FileSaveDialog, None, CLSCTX_ALL)? }; - if let Some(full_path) = directory.canonicalize().log_err() { - let full_path = full_path.to_string_lossy().to_string(); - if !full_path.is_empty() { - let path_item: IShellItem = - unsafe { SHCreateItemFromParsingName(&HSTRING::from(&full_path), None)? 
}; - unsafe { dialog.SetFolder(&path_item).log_err() }; + if !directory.to_string_lossy().is_empty() { + if let Some(full_path) = directory.canonicalize().log_err() { + let full_path = full_path.to_string_lossy().to_string(); + if !full_path.is_empty() { + let path_item: IShellItem = + unsafe { SHCreateItemFromParsingName(&HSTRING::from(&full_path), None)? }; + unsafe { dialog.SetFolder(&path_item).log_err() }; + } } } unsafe { + dialog.SetFileTypes(&[Common::COMDLG_FILTERSPEC { + pszName: windows::core::w!("All files"), + pszSpec: windows::core::w!("*.*"), + }])?; if dialog.Show(None).is_err() { // User cancelled return Ok(None); } } let shell_item = unsafe { dialog.GetResult()? }; - let file_path_string = unsafe { shell_item.GetDisplayName(SIGDN_FILESYSPATH)?.to_string()? }; + let file_path_string = unsafe { + let pwstr = shell_item.GetDisplayName(SIGDN_FILESYSPATH)?; + let string = pwstr.to_string()?; + CoTaskMemFree(Some(pwstr.0 as _)); + string + }; Ok(Some(PathBuf::from(file_path_string))) } diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index 1a059491a2a4ad..e2cfb38afd4dfc 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -12,6 +12,7 @@ use std::{ use ::util::ResultExt; use anyhow::{Context, Result}; +use async_task::Runnable; use futures::channel::oneshot::{self, Receiver}; use itertools::Itertools; use raw_window_handle as rwh; @@ -63,6 +64,7 @@ pub(crate) struct WindowsWindowStatePtr { pub(crate) executor: ForegroundExecutor, pub(crate) windows_version: WindowsVersion, pub(crate) validation_number: usize, + pub(crate) main_receiver: flume::Receiver, } impl WindowsWindowState { @@ -226,6 +228,7 @@ impl WindowsWindowStatePtr { executor: context.executor.clone(), windows_version: context.windows_version, validation_number: context.validation_number, + main_receiver: context.main_receiver.clone(), })) } } @@ -253,18 +256,23 @@ struct 
WindowCreateContext { current_cursor: HCURSOR, windows_version: WindowsVersion, validation_number: usize, + main_receiver: flume::Receiver, } impl WindowsWindow { pub(crate) fn new( handle: AnyWindowHandle, params: WindowParams, - icon: HICON, - executor: ForegroundExecutor, - current_cursor: HCURSOR, - windows_version: WindowsVersion, - validation_number: usize, + creation_info: WindowCreationInfo, ) -> Result { + let WindowCreationInfo { + icon, + executor, + current_cursor, + windows_version, + validation_number, + main_receiver, + } = creation_info; let classname = register_wnd_class(icon); let hide_title_bar = params .titlebar @@ -305,6 +313,7 @@ impl WindowsWindow { current_cursor, windows_version, validation_number, + main_receiver, }; let lpparam = Some(&context as *const _ as *const _); let creation_result = unsafe { From 4d074fc737db71ff10b4cf1e68f4b70e092124b6 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 17 Sep 2024 19:20:45 -0400 Subject: [PATCH 168/762] editor: Fix rewrap with a non-empty selection (#17980) This PR fixes an issue where rewrapping would not occur with a non-empty selection. It is only the expansion to neighboring lines that needs to be gated by an empty selection. 
Release Notes: - N/A --- crates/editor/src/editor.rs | 30 ++++++++++++++-------------- crates/editor/src/editor_tests.rs | 33 +++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 15 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c3c54e49513d7a..61a59665c15e3e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6736,22 +6736,22 @@ impl Editor { let mut line_prefix = indent_size.chars().collect::(); - if selection.is_empty() { - if let Some(comment_prefix) = - buffer - .language_scope_at(selection.head()) - .and_then(|language| { - language - .line_comment_prefixes() - .iter() - .find(|prefix| buffer.contains_str_at(indent_end, prefix)) - .cloned() - }) - { - line_prefix.push_str(&comment_prefix); - should_rewrap = true; - } + if let Some(comment_prefix) = + buffer + .language_scope_at(selection.head()) + .and_then(|language| { + language + .line_comment_prefixes() + .iter() + .find(|prefix| buffer.contains_str_at(indent_end, prefix)) + .cloned() + }) + { + line_prefix.push_str(&comment_prefix); + should_rewrap = true; + } + if selection.is_empty() { 'expand_upwards: while start_row > 0 { let prev_row = start_row - 1; if buffer.contains_str_at(Point::new(prev_row, 0), &line_prefix) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 6a8efc5905fbb1..e11b38ba59680d 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4017,6 +4017,39 @@ async fn test_rewrap(cx: &mut TestAppContext) { cx.assert_editor_state(wrapped_text); } + // Test that rewrapping works inside of a selection + { + let language = Arc::new(Language::new( + LanguageConfig { + line_comments: vec!["// ".into()], + ..LanguageConfig::default() + }, + None, + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + let unwrapped_text = indoc! {" + «// Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Vivamus mollis elit purus, a ornare lacus gravida vitae. Proin consectetur felis vel purus auctor, eu lacinia sapien scelerisque. Vivamus sit amet neque et quam tincidunt hendrerit. Praesent semper egestas tellus id dignissim. Pellentesque odio lectus, iaculis ac volutpat et, blandit quis urna. Sed vestibulum nisi sit amet nisl venenatis tempus. Donec molestie blandit quam, et porta nunc laoreet in. Integer sit amet scelerisque nisi. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras egestas porta metus, eu viverra ipsum efficitur quis. Donec luctus eros turpis, id vulputate turpis porttitor id. Aliquam id accumsan eros.ˇ» + "}; + + let wrapped_text = indoc! {" + // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit + // purus, a ornare lacus gravida vitae. Proin consectetur felis vel purus + // auctor, eu lacinia sapien scelerisque. Vivamus sit amet neque et quam + // tincidunt hendrerit. Praesent semper egestas tellus id dignissim. + // Pellentesque odio lectus, iaculis ac volutpat et, blandit quis urna. Sed + // vestibulum nisi sit amet nisl venenatis tempus. Donec molestie blandit quam, + // et porta nunc laoreet in. Integer sit amet scelerisque nisi. Lorem ipsum + // dolor sit amet, consectetur adipiscing elit. Cras egestas porta metus, eu + // viverra ipsum efficitur quis. Donec luctus eros turpis, id vulputate turpis + // porttitor id. Aliquam id accumsan eros.ˇ + "}; + + cx.set_state(unwrapped_text); + cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); + cx.assert_editor_state(wrapped_text); + } + // Test that cursors that expand to the same region are collapsed. { let language = Arc::new(Language::new( From 2e72fd210a93aaaf5a4a1ad9fd04a046fd1f394e Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 17 Sep 2024 19:43:59 -0600 Subject: [PATCH 169/762] Replace Default trait bound with a zero function on Summary/Dimension (#17975) This lets us provide a context when constructing the zero value. 
We need it so we can require anchors to be associated with a buffer id, which we're doing as part of simplifying the multibuffer API. Release Notes: - N/A Co-authored-by: Nathan --- crates/channel/src/channel_chat.rs | 30 ++-- crates/editor/src/display_map.rs | 5 +- crates/editor/src/display_map/block_map.rs | 38 +++-- crates/editor/src/display_map/crease_map.rs | 41 +++-- crates/editor/src/display_map/fold_map.rs | 79 +++++++--- crates/editor/src/display_map/inlay_map.rs | 47 ++++-- crates/editor/src/display_map/wrap_map.rs | 44 ++++-- crates/editor/src/git/blame.rs | 14 +- crates/git/src/diff.rs | 46 +++--- crates/gpui/src/elements/list.rs | 34 +++-- crates/language/src/buffer.rs | 10 +- crates/language/src/diagnostic_set.rs | 8 +- crates/language/src/syntax_map.rs | 39 +++-- .../src/syntax_map/syntax_map_tests.rs | 16 +- crates/multi_buffer/src/multi_buffer.rs | 142 +++++++++++------- .../notifications/src/notification_store.rs | 21 ++- crates/project/src/lsp_store.rs | 8 +- crates/rope/src/rope.rs | 66 +++++--- crates/rope/src/unclipped.rs | 4 + crates/sum_tree/src/cursor.rs | 36 +++-- crates/sum_tree/src/sum_tree.rs | 125 +++++++++------ crates/sum_tree/src/tree_map.rs | 22 ++- crates/text/src/anchor.rs | 2 +- crates/text/src/locator.rs | 4 + crates/text/src/operation_queue.rs | 12 +- crates/text/src/text.rs | 94 ++++++++---- crates/text/src/undo_map.rs | 9 +- crates/worktree/src/worktree.rs | 59 ++++++-- 28 files changed, 706 insertions(+), 349 deletions(-) diff --git a/crates/channel/src/channel_chat.rs b/crates/channel/src/channel_chat.rs index 286eb46a916004..1a9e46db0460a9 100644 --- a/crates/channel/src/channel_chat.rs +++ b/crates/channel/src/channel_chat.rs @@ -332,7 +332,7 @@ impl ChannelChat { .update(&mut cx, |chat, cx| { if let Some(first_id) = chat.first_loaded_message_id() { if first_id <= message_id { - let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(); + let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>(&()); 
let message_id = ChannelMessageId::Saved(message_id); cursor.seek(&message_id, Bias::Left, &()); return ControlFlow::Break( @@ -498,7 +498,7 @@ impl ChannelChat { } pub fn message(&self, ix: usize) -> &ChannelMessage { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); cursor.seek(&Count(ix), Bias::Right, &()); cursor.item().unwrap() } @@ -515,13 +515,13 @@ impl ChannelChat { } pub fn messages_in_range(&self, range: Range) -> impl Iterator { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); cursor.seek(&Count(range.start), Bias::Right, &()); cursor.take(range.len()) } pub fn pending_messages(&self) -> impl Iterator { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &()); cursor } @@ -589,11 +589,11 @@ impl ChannelChat { fn insert_messages(&mut self, messages: SumTree, cx: &mut ModelContext) { if let Some((first_message, last_message)) = messages.first().zip(messages.last()) { let nonces = messages - .cursor::<()>() + .cursor::<()>(&()) .map(|m| m.nonce) .collect::>(); - let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(); + let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>(&()); let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &()); let start_ix = old_cursor.start().1 .0; let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &()); @@ -646,7 +646,7 @@ impl ChannelChat { } fn message_removed(&mut self, id: u64, cx: &mut ModelContext) { - let mut cursor = self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &()); if let Some(item) = cursor.item() { if item.id == ChannelMessageId::Saved(id) { @@ -685,7 +685,7 @@ impl ChannelChat { edited_at: Option, cx: &mut ModelContext, ) { - let mut cursor = 
self.messages.cursor::(); + let mut cursor = self.messages.cursor::(&()); let mut messages = cursor.slice(&id, Bias::Left, &()); let ix = messages.summary().count; @@ -716,7 +716,7 @@ async fn messages_from_proto( cx: &mut AsyncAppContext, ) -> Result> { let messages = ChannelMessage::from_proto_vec(proto_messages, user_store, cx).await?; - let mut result = SumTree::new(); + let mut result = SumTree::default(); result.extend(messages, &()); Ok(result) } @@ -825,6 +825,10 @@ impl Default for ChannelMessageId { impl sum_tree::Summary for ChannelMessageSummary { type Context = (); + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.max_id = summary.max_id; self.count += summary.count; @@ -832,6 +836,10 @@ impl sum_tree::Summary for ChannelMessageSummary { } impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) { debug_assert!(summary.max_id > *self); *self = summary.max_id; @@ -839,6 +847,10 @@ impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId { } impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for Count { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) { self.0 += summary.count; } diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 86ea7ee3fabf95..790a0a6a1eba78 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -127,7 +127,9 @@ impl DisplayMap { let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); let tab_size = Self::tab_size(&buffer, cx); - let (inlay_map, snapshot) = InlayMap::new(buffer.read(cx).snapshot(cx)); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let crease_map = CreaseMap::new(&buffer_snapshot); + let (inlay_map, 
snapshot) = InlayMap::new(buffer_snapshot); let (fold_map, snapshot) = FoldMap::new(snapshot); let (tab_map, snapshot) = TabMap::new(snapshot, tab_size); let (wrap_map, snapshot) = WrapMap::new(snapshot, font, font_size, wrap_width, cx); @@ -138,7 +140,6 @@ impl DisplayMap { excerpt_header_height, excerpt_footer_height, ); - let crease_map = CreaseMap::default(); cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach(); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 28e0b9d7af3709..3a298832dee5ef 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -389,10 +389,10 @@ impl BlockMap { } let mut transforms = self.transforms.borrow_mut(); - let mut new_transforms = SumTree::new(); + let mut new_transforms = SumTree::default(); let old_row_count = transforms.summary().input_rows; let new_row_count = wrap_snapshot.max_point().row() + 1; - let mut cursor = transforms.cursor::(); + let mut cursor = transforms.cursor::(&()); let mut last_block_ix = 0; let mut blocks_in_edit = Vec::new(); let mut edits = edits.into_iter().peekable(); @@ -757,7 +757,7 @@ impl<'a> BlockMapReader<'a> { .unwrap_or(self.wrap_snapshot.max_point().row() + 1), ); - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&start_wrap_row, Bias::Left, &()); while let Some(transform) = cursor.item() { if cursor.start().0 > end_wrap_row { @@ -950,7 +950,7 @@ impl BlockSnapshot { highlights: Highlights<'a>, ) -> BlockChunks<'a> { let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows); - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let input_end = { cursor.seek(&BlockRow(rows.end), Bias::Right, &()); let overshoot = if cursor @@ -990,7 +990,7 @@ impl BlockSnapshot { } pub(super) fn 
buffer_rows(&self, start_row: BlockRow) -> BlockBufferRows { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&start_row, Bias::Right, &()); let (output_start, input_start) = cursor.start(); let overshoot = if cursor.item().map_or(false, |t| t.is_isomorphic()) { @@ -1008,7 +1008,7 @@ impl BlockSnapshot { } pub fn blocks_in_range(&self, rows: Range) -> impl Iterator { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&BlockRow(rows.start), Bias::Left, &()); while cursor.start().0 < rows.start && cursor.end(&()).0 <= rows.start { cursor.next(&()); @@ -1050,7 +1050,7 @@ impl BlockSnapshot { let wrap_point = self .wrap_snapshot .make_wrap_point(excerpt_range.start, Bias::Left); - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Left, &()); while let Some(transform) = cursor.item() { if let Some(block) = transform.block.as_ref() { @@ -1072,7 +1072,7 @@ impl BlockSnapshot { .wrap_snapshot .make_wrap_point(excerpt_range.end, Bias::Left); - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Left, &()); while let Some(transform) = cursor.item() { if let Some(block) = transform.block.as_ref() { @@ -1102,7 +1102,7 @@ impl BlockSnapshot { } pub(super) fn line_len(&self, row: BlockRow) -> u32 { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(row.0), Bias::Right, &()); if let Some(transform) = cursor.item() { let (output_start, input_start) = cursor.start(); @@ -1118,13 +1118,13 @@ impl BlockSnapshot { } pub(super) fn is_block_line(&self, row: 
BlockRow) -> bool { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&row, Bias::Right, &()); cursor.item().map_or(false, |t| t.block.is_some()) } pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(point.row), Bias::Right, &()); let max_input_row = WrapRow(self.transforms.summary().input_rows); @@ -1172,7 +1172,7 @@ impl BlockSnapshot { } pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint { - let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(); + let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &()); if let Some(transform) = cursor.item() { debug_assert!(transform.is_isomorphic()); @@ -1188,7 +1188,7 @@ impl BlockSnapshot { } pub fn to_wrap_point(&self, block_point: BlockPoint) -> WrapPoint { - let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(block_point.row), Bias::Right, &()); if let Some(transform) = cursor.item() { match transform.block.as_ref().map(|b| b.disposition()) { @@ -1368,6 +1368,10 @@ impl sum_tree::Item for Transform { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.input_rows += summary.input_rows; self.output_rows += summary.output_rows; @@ -1375,12 +1379,20 @@ impl sum_tree::Summary for TransformSummary { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapRow { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.input_rows; } } 
impl<'a> sum_tree::Dimension<'a, TransformSummary> for BlockRow { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.output_rows; } diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index 10ee125b3237ac..bfc9c7d1a4ffbf 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -12,19 +12,34 @@ use crate::FoldPlaceholder; #[derive(Copy, Clone, Default, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)] pub struct CreaseId(usize); -#[derive(Default)] pub struct CreaseMap { snapshot: CreaseSnapshot, next_id: CreaseId, id_to_range: HashMap>, } -#[derive(Clone, Default)] +impl CreaseMap { + pub fn new(snapshot: &MultiBufferSnapshot) -> Self { + CreaseMap { + snapshot: CreaseSnapshot::new(snapshot), + next_id: CreaseId::default(), + id_to_range: HashMap::default(), + } + } +} + +#[derive(Clone)] pub struct CreaseSnapshot { creases: SumTree, } impl CreaseSnapshot { + pub fn new(snapshot: &MultiBufferSnapshot) -> Self { + CreaseSnapshot { + creases: SumTree::new(snapshot), + } + } + /// Returns the first Crease starting on the specified buffer row. 
pub fn query_row<'a>( &'a self, @@ -32,7 +47,7 @@ impl CreaseSnapshot { snapshot: &'a MultiBufferSnapshot, ) -> Option<&'a Crease> { let start = snapshot.anchor_before(Point::new(row.0, 0)); - let mut cursor = self.creases.cursor::(); + let mut cursor = self.creases.cursor::(snapshot); cursor.seek(&start, Bias::Left, snapshot); while let Some(item) = cursor.item() { match Ord::cmp(&item.crease.range.start.to_point(snapshot).row, &row.0) { @@ -56,7 +71,7 @@ impl CreaseSnapshot { snapshot: &'a MultiBufferSnapshot, ) -> impl '_ + Iterator { let start = snapshot.anchor_before(Point::new(range.start.0, 0)); - let mut cursor = self.creases.cursor::(); + let mut cursor = self.creases.cursor::(snapshot); cursor.seek(&start, Bias::Left, snapshot); std::iter::from_fn(move || { @@ -79,7 +94,7 @@ impl CreaseSnapshot { &self, snapshot: &MultiBufferSnapshot, ) -> Vec<(CreaseId, Range)> { - let mut cursor = self.creases.cursor::(); + let mut cursor = self.creases.cursor::(snapshot); let mut results = Vec::new(); cursor.next(snapshot); @@ -194,8 +209,8 @@ impl CreaseMap { ) -> Vec { let mut new_ids = Vec::new(); self.snapshot.creases = { - let mut new_creases = SumTree::new(); - let mut cursor = self.snapshot.creases.cursor::(); + let mut new_creases = SumTree::new(snapshot); + let mut cursor = self.snapshot.creases.cursor::(snapshot); for crease in creases { new_creases.append(cursor.slice(&crease.range, Bias::Left, snapshot), snapshot); @@ -227,8 +242,8 @@ impl CreaseMap { }); self.snapshot.creases = { - let mut new_creases = SumTree::new(); - let mut cursor = self.snapshot.creases.cursor::(); + let mut new_creases = SumTree::new(snapshot); + let mut cursor = self.snapshot.creases.cursor::(snapshot); for (id, range) in removals { new_creases.append(cursor.slice(&range, Bias::Left, snapshot), snapshot); @@ -264,6 +279,10 @@ impl Default for ItemSummary { impl sum_tree::Summary for ItemSummary { type Context = MultiBufferSnapshot; + fn zero(_cx: &Self::Context) -> Self { + 
Default::default() + } + fn add_summary(&mut self, other: &Self, _snapshot: &MultiBufferSnapshot) { self.range = other.range.clone(); } @@ -303,7 +322,7 @@ mod test { let text = "line1\nline2\nline3\nline4\nline5"; let buffer = MultiBuffer::build_simple(text, cx); let snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); - let mut crease_map = CreaseMap::default(); + let mut crease_map = CreaseMap::new(&buffer.read(cx).read(cx)); // Insert creases let creases = [ @@ -350,7 +369,7 @@ mod test { let text = "line1\nline2\nline3\nline4\nline5\nline6\nline7"; let buffer = MultiBuffer::build_simple(text, cx); let snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); - let mut crease_map = CreaseMap::default(); + let mut crease_map = CreaseMap::new(&snapshot); let creases = [ Crease::new( diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 486fe4b2e5d40b..37983030b8e1a7 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -79,7 +79,7 @@ impl FoldPoint { } pub fn to_inlay_point(self, snapshot: &FoldSnapshot) -> InlayPoint { - let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = snapshot.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = self.0 - cursor.start().0 .0; InlayPoint(cursor.start().1 .0 + overshoot) @@ -88,7 +88,7 @@ impl FoldPoint { pub fn to_offset(self, snapshot: &FoldSnapshot) -> FoldOffset { let mut cursor = snapshot .transforms - .cursor::<(FoldPoint, TransformSummary)>(); + .cursor::<(FoldPoint, TransformSummary)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = self.0 - cursor.start().1.output.lines; let mut offset = cursor.start().1.output.len; @@ -105,6 +105,10 @@ impl FoldPoint { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn 
add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.lines; } @@ -154,8 +158,8 @@ impl<'a> FoldMapWriter<'a> { folds.sort_unstable_by(|a, b| sum_tree::SeekTarget::cmp(&a.range, &b.range, buffer)); self.0.snapshot.folds = { - let mut new_tree = SumTree::new(); - let mut cursor = self.0.snapshot.folds.cursor::(); + let mut new_tree = SumTree::new(buffer); + let mut cursor = self.0.snapshot.folds.cursor::(buffer); for fold in folds { new_tree.append(cursor.slice(&fold.range, Bias::Right, buffer), buffer); new_tree.push(fold, buffer); @@ -202,8 +206,8 @@ impl<'a> FoldMapWriter<'a> { fold_ixs_to_delete.dedup(); self.0.snapshot.folds = { - let mut cursor = self.0.snapshot.folds.cursor::(); - let mut folds = SumTree::new(); + let mut cursor = self.0.snapshot.folds.cursor::(buffer); + let mut folds = SumTree::new(buffer); for fold_ix in fold_ixs_to_delete { folds.append(cursor.slice(&fold_ix, Bias::Right, buffer), buffer); cursor.next(buffer); @@ -230,7 +234,7 @@ impl FoldMap { pub(crate) fn new(inlay_snapshot: InlaySnapshot) -> (Self, FoldSnapshot) { let this = Self { snapshot: FoldSnapshot { - folds: Default::default(), + folds: SumTree::new(&inlay_snapshot.buffer), transforms: SumTree::from_item( Transform { summary: TransformSummary { @@ -314,8 +318,8 @@ impl FoldMap { } else { let mut inlay_edits_iter = inlay_edits.iter().cloned().peekable(); - let mut new_transforms = SumTree::::new(); - let mut cursor = self.snapshot.transforms.cursor::(); + let mut new_transforms = SumTree::::default(); + let mut cursor = self.snapshot.transforms.cursor::(&()); cursor.seek(&InlayOffset(0), Bias::Right, &()); while let Some(mut edit) = inlay_edits_iter.next() { @@ -367,7 +371,10 @@ impl FoldMap { let anchor = inlay_snapshot .buffer .anchor_before(inlay_snapshot.to_buffer_offset(edit.new.start)); - let mut folds_cursor = self.snapshot.folds.cursor::(); + let mut folds_cursor = self + .snapshot + .folds + .cursor::(&inlay_snapshot.buffer); 
folds_cursor.seek( &FoldRange(anchor..Anchor::max()), Bias::Left, @@ -470,8 +477,8 @@ impl FoldMap { let mut old_transforms = self .snapshot .transforms - .cursor::<(InlayOffset, FoldOffset)>(); - let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(); + .cursor::<(InlayOffset, FoldOffset)>(&()); + let mut new_transforms = new_transforms.cursor::<(InlayOffset, FoldOffset)>(&()); for mut edit in inlay_edits { old_transforms.seek(&edit.old.start, Bias::Left, &()); @@ -545,7 +552,7 @@ impl FoldSnapshot { pub fn text_summary_for_range(&self, range: Range) -> TextSummary { let mut summary = TextSummary::default(); - let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&range.start, Bias::Right, &()); if let Some(transform) = cursor.item() { let start_in_transform = range.start.0 - cursor.start().0 .0; @@ -594,7 +601,7 @@ impl FoldSnapshot { } pub fn to_fold_point(&self, point: InlayPoint, bias: Bias) -> FoldPoint { - let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, FoldPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().map_or(false, |t| t.is_fold()) { if bias == Bias::Left || point == cursor.start().0 { @@ -631,7 +638,7 @@ impl FoldSnapshot { } let fold_point = FoldPoint::new(start_row, 0); - let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&fold_point, Bias::Left, &()); let overshoot = fold_point.0 - cursor.start().0 .0; @@ -672,7 +679,7 @@ impl FoldSnapshot { { let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer); let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset); - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&inlay_offset, Bias::Right, &()); 
cursor.item().map_or(false, |t| t.placeholder.is_some()) } @@ -681,7 +688,7 @@ impl FoldSnapshot { let mut inlay_point = self .inlay_snapshot .to_inlay_point(Point::new(buffer_row.0, 0)); - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&inlay_point, Bias::Right, &()); loop { match cursor.item() { @@ -711,7 +718,7 @@ impl FoldSnapshot { language_aware: bool, highlights: Highlights<'a>, ) -> FoldChunks<'a> { - let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(); + let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); transform_cursor.seek(&range.start, Bias::Right, &()); let inlay_start = { @@ -766,7 +773,7 @@ impl FoldSnapshot { } pub fn clip_point(&self, point: FoldPoint, bias: Bias) -> FoldPoint { - let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(FoldPoint, InlayPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if let Some(transform) = cursor.item() { let transform_start = cursor.start().0 .0; @@ -826,7 +833,7 @@ where let buffer = &inlay_snapshot.buffer; let start = buffer.anchor_before(range.start.to_offset(buffer)); let end = buffer.anchor_after(range.end.to_offset(buffer)); - let mut cursor = folds.filter::<_, usize>(move |summary| { + let mut cursor = folds.filter::<_, usize>(buffer, move |summary| { let start_cmp = start.cmp(&summary.max_end, buffer); let end_cmp = end.cmp(&summary.min_start, buffer); @@ -945,6 +952,10 @@ impl sum_tree::Item for Transform { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.input += &other.input; self.output += &other.output; @@ -1028,6 +1039,10 @@ impl Default for FoldSummary { impl sum_tree::Summary for FoldSummary { type Context = MultiBufferSnapshot; + fn zero(_cx: &MultiBufferSnapshot) -> Self { + 
Default::default() + } + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { if other.min_start.cmp(&self.min_start, buffer) == Ordering::Less { self.min_start = other.min_start; @@ -1052,6 +1067,10 @@ impl sum_tree::Summary for FoldSummary { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for FoldRange { + fn zero(_cx: &MultiBufferSnapshot) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { self.0.start = summary.start; self.0.end = summary.end; @@ -1065,6 +1084,10 @@ impl<'a> sum_tree::SeekTarget<'a, FoldSummary, FoldRange> for FoldRange { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { + fn zero(_cx: &MultiBufferSnapshot) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { *self += summary.count; } @@ -1196,7 +1219,7 @@ impl FoldOffset { pub fn to_point(self, snapshot: &FoldSnapshot) -> FoldPoint { let mut cursor = snapshot .transforms - .cursor::<(FoldOffset, TransformSummary)>(); + .cursor::<(FoldOffset, TransformSummary)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) { Point::new(0, (self.0 - cursor.start().0 .0) as u32) @@ -1210,7 +1233,7 @@ impl FoldOffset { #[cfg(test)] pub fn to_inlay_offset(self, snapshot: &FoldSnapshot) -> InlayOffset { - let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(); + let mut cursor = snapshot.transforms.cursor::<(FoldOffset, InlayOffset)>(&()); cursor.seek(&self, Bias::Right, &()); let overshoot = self.0 - cursor.start().0 .0; InlayOffset(cursor.start().1 .0 + overshoot) @@ -1240,18 +1263,30 @@ impl Sub for FoldOffset { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldOffset { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.len; } } impl<'a> sum_tree::Dimension<'a, 
TransformSummary> for InlayPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.input.lines; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.input.len; } diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index b6ab2cdd28feea..712db45e3f61a7 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -97,6 +97,10 @@ struct TransformSummary { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.input += &other.input; self.output += &other.output; @@ -137,6 +141,10 @@ impl SubAssign for InlayOffset { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayOffset { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.len; } @@ -162,18 +170,30 @@ impl Sub for InlayPoint { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for InlayPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.lines; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for usize { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { *self += &summary.input.len; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for Point { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { *self += &summary.input.lines; } @@ -475,8 +495,8 @@ impl InlayMap { (snapshot.clone(), 
Vec::new()) } else { let mut inlay_edits = Patch::default(); - let mut new_transforms = SumTree::new(); - let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(); + let mut new_transforms = SumTree::default(); + let mut cursor = snapshot.transforms.cursor::<(usize, InlayOffset)>(&()); let mut buffer_edits_iter = buffer_edits.iter().peekable(); while let Some(buffer_edit) = buffer_edits_iter.next() { new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &()); @@ -693,7 +713,7 @@ impl InlaySnapshot { pub fn to_point(&self, offset: InlayOffset) -> InlayPoint { let mut cursor = self .transforms - .cursor::<(InlayOffset, (InlayPoint, usize))>(); + .cursor::<(InlayOffset, (InlayPoint, usize))>(&()); cursor.seek(&offset, Bias::Right, &()); let overshoot = offset.0 - cursor.start().0 .0; match cursor.item() { @@ -723,7 +743,7 @@ impl InlaySnapshot { pub fn to_offset(&self, point: InlayPoint) -> InlayOffset { let mut cursor = self .transforms - .cursor::<(InlayPoint, (InlayOffset, Point))>(); + .cursor::<(InlayPoint, (InlayOffset, Point))>(&()); cursor.seek(&point, Bias::Right, &()); let overshoot = point.0 - cursor.start().0 .0; match cursor.item() { @@ -741,9 +761,8 @@ impl InlaySnapshot { None => self.len(), } } - pub fn to_buffer_point(&self, point: InlayPoint) -> Point { - let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); cursor.seek(&point, Bias::Right, &()); match cursor.item() { Some(Transform::Isomorphic(_)) => { @@ -754,9 +773,8 @@ impl InlaySnapshot { None => self.buffer.max_point(), } } - pub fn to_buffer_offset(&self, offset: InlayOffset) -> usize { - let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); cursor.seek(&offset, Bias::Right, &()); match cursor.item() { Some(Transform::Isomorphic(_)) => { @@ -769,7 +787,7 @@ impl InlaySnapshot { } pub fn 
to_inlay_offset(&self, offset: usize) -> InlayOffset { - let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(); + let mut cursor = self.transforms.cursor::<(usize, InlayOffset)>(&()); cursor.seek(&offset, Bias::Left, &()); loop { match cursor.item() { @@ -801,9 +819,8 @@ impl InlaySnapshot { } } } - pub fn to_inlay_point(&self, point: Point) -> InlayPoint { - let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(); + let mut cursor = self.transforms.cursor::<(Point, InlayPoint)>(&()); cursor.seek(&point, Bias::Left, &()); loop { match cursor.item() { @@ -837,7 +854,7 @@ impl InlaySnapshot { } pub fn clip_point(&self, mut point: InlayPoint, mut bias: Bias) -> InlayPoint { - let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); cursor.seek(&point, Bias::Left, &()); loop { match cursor.item() { @@ -934,7 +951,7 @@ impl InlaySnapshot { pub fn text_summary_for_range(&self, range: Range) -> TextSummary { let mut summary = TextSummary::default(); - let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); cursor.seek(&range.start, Bias::Right, &()); let overshoot = range.start.0 - cursor.start().0 .0; @@ -982,7 +999,7 @@ impl InlaySnapshot { } pub fn buffer_rows(&self, row: u32) -> InlayBufferRows<'_> { - let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(); + let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); let inlay_point = InlayPoint::new(row, 0); cursor.seek(&inlay_point, Bias::Left, &()); @@ -1024,7 +1041,7 @@ impl InlaySnapshot { language_aware: bool, highlights: Highlights<'a>, ) -> InlayChunks<'a> { - let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); + let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(&()); cursor.seek(&range.start, Bias::Right, &()); let mut highlight_endpoints = Vec::new(); diff --git 
a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 08b2ae0c645bcd..564bba2158030e 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -204,7 +204,7 @@ impl WrapMap { } } else { let old_rows = self.snapshot.transforms.summary().output.lines.row + 1; - self.snapshot.transforms = SumTree::new(); + self.snapshot.transforms = SumTree::default(); let summary = self.snapshot.tab_snapshot.text_summary(); if !summary.lines.is_zero() { self.snapshot @@ -303,7 +303,7 @@ impl WrapMap { impl WrapSnapshot { fn new(tab_snapshot: TabSnapshot) -> Self { - let mut transforms = SumTree::new(); + let mut transforms = SumTree::default(); let extent = tab_snapshot.text_summary(); if !extent.lines.is_zero() { transforms.push(Transform::isomorphic(extent), &()); @@ -324,7 +324,7 @@ impl WrapSnapshot { if tab_edits.is_empty() { new_transforms = self.transforms.clone(); } else { - let mut old_cursor = self.transforms.cursor::(); + let mut old_cursor = self.transforms.cursor::(&()); let mut tab_edits_iter = tab_edits.iter().peekable(); new_transforms = @@ -424,7 +424,7 @@ impl WrapSnapshot { new_transforms = self.transforms.clone(); } else { let mut row_edits = row_edits.into_iter().peekable(); - let mut old_cursor = self.transforms.cursor::(); + let mut old_cursor = self.transforms.cursor::(&()); new_transforms = old_cursor.slice( &TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0), @@ -537,8 +537,8 @@ impl WrapSnapshot { fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> Patch { let mut wrap_edits = Vec::new(); - let mut old_cursor = self.transforms.cursor::(); - let mut new_cursor = new_snapshot.transforms.cursor::(); + let mut old_cursor = self.transforms.cursor::(&()); + let mut new_cursor = new_snapshot.transforms.cursor::(&()); for mut tab_edit in tab_edits.iter().cloned() { tab_edit.old.start.0.column = 0; tab_edit.old.end.0 += Point::new(1, 0); @@ 
-579,7 +579,7 @@ impl WrapSnapshot { ) -> WrapChunks<'a> { let output_start = WrapPoint::new(rows.start, 0); let output_end = WrapPoint::new(rows.end, 0); - let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); transforms.seek(&output_start, Bias::Right, &()); let mut input_start = TabPoint(transforms.start().1 .0); if transforms.item().map_or(false, |t| t.is_isomorphic()) { @@ -606,7 +606,7 @@ impl WrapSnapshot { } pub fn line_len(&self, row: u32) -> u32 { - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &()); if cursor .item() @@ -626,7 +626,7 @@ impl WrapSnapshot { } pub fn soft_wrap_indent(&self, row: u32) -> Option { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &()); cursor.item().and_then(|transform| { if transform.is_isomorphic() { @@ -642,7 +642,7 @@ impl WrapSnapshot { } pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows { - let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); let mut input_row = transforms.start().1.row(); if transforms.item().map_or(false, |t| t.is_isomorphic()) { @@ -662,7 +662,7 @@ impl WrapSnapshot { } pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint { - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); let mut tab_point = cursor.start().1 .0; if cursor.item().map_or(false, |t| t.is_isomorphic()) { @@ -680,14 +680,14 @@ impl WrapSnapshot { } pub fn tab_point_to_wrap_point(&self, point: 
TabPoint) -> WrapPoint { - let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(); + let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&()); cursor.seek(&point, Bias::Right, &()); WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0)) } pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint { if bias == Bias::Left { - let mut cursor = self.transforms.cursor::(); + let mut cursor = self.transforms.cursor::(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().map_or(false, |t| !t.is_isomorphic()) { point = *cursor.start(); @@ -705,7 +705,7 @@ impl WrapSnapshot { *point.column_mut() = 0; - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -725,7 +725,7 @@ impl WrapSnapshot { pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option { point.0 += Point::new(1, 0); - let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); while let Some(transform) = cursor.item() { if transform.is_isomorphic() && cursor.start().1.column() == 0 { @@ -747,7 +747,7 @@ impl WrapSnapshot { ); { - let mut transforms = self.transforms.cursor::<()>().peekable(); + let mut transforms = self.transforms.cursor::<()>(&()).peekable(); while let Some(transform) = transforms.next() { if let Some(next_transform) = transforms.peek() { assert!(transform.is_isomorphic() != next_transform.is_isomorphic()); @@ -982,6 +982,10 @@ impl WrapPoint { impl sum_tree::Summary for TransformSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.input += &other.input; self.output += &other.output; @@ -989,6 +993,10 @@ impl sum_tree::Summary for TransformSummary { } 
impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.input.lines; } @@ -1001,6 +1009,10 @@ impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for TabPoi } impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapPoint { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += summary.output.lines; } diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 00531ee88649a5..775cbcc379e128 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -37,12 +37,20 @@ impl sum_tree::Item for GitBlameEntry { impl sum_tree::Summary for GitBlameEntrySummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _cx: &()) { self.rows += summary.rows; } } impl<'a> sum_tree::Dimension<'a, GitBlameEntrySummary> for u32 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a GitBlameEntrySummary, _cx: &()) { *self += summary.rows; } @@ -191,7 +199,7 @@ impl GitBlame { ) -> impl 'a + Iterator> { self.sync(cx); - let mut cursor = self.entries.cursor::(); + let mut cursor = self.entries.cursor::(&()); rows.into_iter().map(move |row| { let row = row?; cursor.seek_forward(&row.0, Bias::Right, &()); @@ -249,8 +257,8 @@ impl GitBlame { }) .peekable(); - let mut new_entries = SumTree::new(); - let mut cursor = self.entries.cursor::(); + let mut new_entries = SumTree::default(); + let mut cursor = self.entries.cursor::(&()); while let Some(mut edit) = row_edits.next() { while let Some(next_edit) = row_edits.peek() { diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index eedef199dc3798..8cc7ee186350c5 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -48,6 
+48,10 @@ pub struct DiffHunkSummary { impl sum_tree::Summary for DiffHunkSummary { type Context = text::BufferSnapshot; + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { self.buffer_range.start = self .buffer_range @@ -63,17 +67,11 @@ pub struct BufferDiff { tree: SumTree>, } -impl Default for BufferDiff { - fn default() -> Self { - Self::new() - } -} - impl BufferDiff { - pub fn new() -> BufferDiff { + pub fn new(buffer: &BufferSnapshot) -> BufferDiff { BufferDiff { last_buffer_version: None, - tree: SumTree::new(), + tree: SumTree::new(buffer), } } @@ -97,11 +95,13 @@ impl BufferDiff { range: Range, buffer: &'a BufferSnapshot, ) -> impl 'a + Iterator> { - let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| { - let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); - let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); - !before_start && !after_end - }); + let mut cursor = self + .tree + .filter::<_, DiffHunkSummary>(buffer, move |summary| { + let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); + let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); + !before_start && !after_end + }); let anchor_iter = std::iter::from_fn(move || { cursor.next(buffer); @@ -142,11 +142,13 @@ impl BufferDiff { range: Range, buffer: &'a BufferSnapshot, ) -> impl 'a + Iterator> { - let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| { - let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); - let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); - !before_start && !after_end - }); + let mut cursor = self + .tree + .filter::<_, DiffHunkSummary>(buffer, move |summary| { + let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); + let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); + 
!before_start && !after_end + }); std::iter::from_fn(move || { cursor.prev(buffer); @@ -171,11 +173,11 @@ impl BufferDiff { #[cfg(test)] fn clear(&mut self, buffer: &text::BufferSnapshot) { self.last_buffer_version = Some(buffer.version().clone()); - self.tree = SumTree::new(); + self.tree = SumTree::new(buffer); } pub async fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) { - let mut tree = SumTree::new(); + let mut tree = SumTree::new(buffer); let diff_base_text = diff_base.to_string(); let buffer_text = buffer.as_rope().to_string(); @@ -351,7 +353,7 @@ mod tests { .unindent(); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); - let mut diff = BufferDiff::new(); + let mut diff = BufferDiff::new(&buffer); smol::block_on(diff.update(&diff_base_rope, &buffer)); assert_hunks( diff.hunks(&buffer), @@ -412,7 +414,7 @@ mod tests { .unindent(); let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); - let mut diff = BufferDiff::new(); + let mut diff = BufferDiff::new(&buffer); smol::block_on(diff.update(&diff_base_rope, &buffer)); assert_eq!(diff.hunks(&buffer).count(), 8); diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index bea8270fa5524e..d03392867b5ee2 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -181,7 +181,7 @@ impl ListState { last_layout_bounds: None, last_padding: None, render_item: Box::new(render_item), - items: SumTree::new(), + items: SumTree::default(), logical_scroll_top: None, alignment, overdraw, @@ -228,7 +228,7 @@ impl ListState { ) { let state = &mut *self.0.borrow_mut(); - let mut old_items = state.items.cursor::(); + let mut old_items = state.items.cursor::(&()); let mut new_items = old_items.slice(&Count(old_range.start), Bias::Right, &()); old_items.seek_forward(&Count(old_range.end), Bias::Right, &()); @@ -297,7 +297,7 @@ impl ListState { scroll_top.item_ix = ix; scroll_top.offset_in_item = px(0.); } else { - let 
mut cursor = state.items.cursor::(); + let mut cursor = state.items.cursor::(&()); cursor.seek(&Count(ix + 1), Bias::Right, &()); let bottom = cursor.start().height + padding.top; let goal_top = px(0.).max(bottom - height + padding.bottom); @@ -326,7 +326,7 @@ impl ListState { return None; } - let mut cursor = state.items.cursor::<(Count, Height)>(); + let mut cursor = state.items.cursor::<(Count, Height)>(&()); cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); let scroll_top = cursor.start().1 .0 + scroll_top.offset_in_item; @@ -348,7 +348,7 @@ impl ListState { impl StateInner { fn visible_range(&self, height: Pixels, scroll_top: &ListOffset) -> Range { - let mut cursor = self.items.cursor::(); + let mut cursor = self.items.cursor::(&()); cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); let start_y = cursor.start().height + scroll_top.offset_in_item; cursor.seek_forward(&Height(start_y + height), Bias::Left, &()); @@ -378,7 +378,7 @@ impl StateInner { if self.alignment == ListAlignment::Bottom && new_scroll_top == scroll_max { self.logical_scroll_top = None; } else { - let mut cursor = self.items.cursor::(); + let mut cursor = self.items.cursor::(&()); cursor.seek(&Height(new_scroll_top), Bias::Right, &()); let item_ix = cursor.start().count; let offset_in_item = new_scroll_top - cursor.start().height; @@ -418,7 +418,7 @@ impl StateInner { } fn scroll_top(&self, logical_scroll_top: &ListOffset) -> Pixels { - let mut cursor = self.items.cursor::(); + let mut cursor = self.items.cursor::(&()); cursor.seek(&Count(logical_scroll_top.item_ix), Bias::Right, &()); cursor.start().height + logical_scroll_top.offset_in_item } @@ -445,7 +445,7 @@ impl StateInner { AvailableSpace::MinContent, ); - let mut cursor = old_items.cursor::(); + let mut cursor = old_items.cursor::(&()); // Render items after the scroll top, including those in the trailing overdraw cursor.seek(&Count(scroll_top.item_ix), Bias::Right, &()); @@ -560,7 +560,7 @@ impl StateInner { } let 
measured_range = cursor.start().0..(cursor.start().0 + measured_items.len()); - let mut cursor = old_items.cursor::(); + let mut cursor = old_items.cursor::(&()); let mut new_items = cursor.slice(&Count(measured_range.start), Bias::Right, &()); new_items.extend(measured_items, &()); cursor.seek(&Count(measured_range.end), Bias::Right, &()); @@ -573,7 +573,7 @@ impl StateInner { if !rendered_focused_item { let mut cursor = self .items - .filter::<_, Count>(|summary| summary.has_focus_handles); + .filter::<_, Count>(&(), |summary| summary.has_focus_handles); cursor.next(&()); while let Some(item) = cursor.item() { if item.contains_focused(cx) { @@ -629,7 +629,7 @@ impl StateInner { offset_in_item: autoscroll_bounds.top() - item_origin.y, }); } else if autoscroll_bounds.bottom() > bounds.bottom() { - let mut cursor = self.items.cursor::(); + let mut cursor = self.items.cursor::(&()); cursor.seek(&Count(item.index), Bias::Right, &()); let mut height = bounds.size.height - padding.top - padding.bottom; @@ -883,6 +883,10 @@ impl sum_tree::Item for ListItem { impl sum_tree::Summary for ListItemSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.count += summary.count; self.rendered_count += summary.rendered_count; @@ -893,12 +897,20 @@ impl sum_tree::Summary for ListItemSummary { } impl<'a> sum_tree::Dimension<'a, ListItemSummary> for Count { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ListItemSummary, _: &()) { self.0 += summary.count; } } impl<'a> sum_tree::Dimension<'a, ListItemSummary> for Height { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ListItemSummary, _: &()) { self.0 += summary.height; } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 76058ffd9bac9a..43fe1565acb796 100644 --- a/crates/language/src/buffer.rs +++ 
b/crates/language/src/buffer.rs @@ -722,7 +722,9 @@ impl Buffer { capability: Capability, ) -> Self { let saved_mtime = file.as_ref().and_then(|file| file.mtime()); - + let snapshot = buffer.snapshot(); + let git_diff = git::diff::BufferDiff::new(&snapshot); + let syntax_map = Mutex::new(SyntaxMap::new(&snapshot)); Self { saved_mtime, saved_version: buffer.version(), @@ -739,10 +741,10 @@ impl Buffer { }) .map(Rope::from), diff_base_version: 0, - git_diff: git::diff::BufferDiff::new(), + git_diff, file, capability, - syntax_map: Mutex::new(SyntaxMap::new()), + syntax_map, parsing_in_background: false, non_text_state_update_count: 0, sync_parse_timeout: Duration::from_millis(1), @@ -809,7 +811,7 @@ impl Buffer { /// Assign a language to the buffer. pub fn set_language(&mut self, language: Option>, cx: &mut ModelContext) { self.non_text_state_update_count += 1; - self.syntax_map.lock().clear(); + self.syntax_map.lock().clear(&self.text); self.language = language; self.reparse(cx); cx.emit(BufferEvent::LanguageChanged); diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index bc53778d97fb50..c35659d9bbd4d1 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -15,7 +15,7 @@ use text::{Anchor, FromAnchor, PointUtf16, ToOffset}; /// The diagnostics are stored in a [`SumTree`], which allows this struct /// to be cheaply copied, and allows for efficient retrieval of the /// diagnostics that intersect a given range of the buffer. 
-#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] pub struct DiagnosticSet { diagnostics: SumTree>, } @@ -135,7 +135,7 @@ impl DiagnosticSet { { let end_bias = if inclusive { Bias::Right } else { Bias::Left }; let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias); - let mut cursor = self.diagnostics.filter::<_, ()>({ + let mut cursor = self.diagnostics.filter::<_, ()>(buffer, { move |summary: &Summary| { let start_cmp = range.start.cmp(&summary.max_end, buffer); let end_cmp = range.end.cmp(&summary.min_start, buffer); @@ -261,6 +261,10 @@ impl Default for Summary { impl sum_tree::Summary for Summary { type Context = text::BufferSnapshot; + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { if other.min_start.cmp(&self.min_start, buffer).is_lt() { self.min_start = other.min_start; diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index 0cdc166570face..daae54fb4da62a 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -18,13 +18,12 @@ use sum_tree::{Bias, SeekTarget, SumTree}; use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; use tree_sitter::{Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree}; -#[derive(Default)] pub struct SyntaxMap { snapshot: SyntaxSnapshot, language_registry: Option>, } -#[derive(Clone, Default)] +#[derive(Clone)] pub struct SyntaxSnapshot { layers: SumTree, parsed_version: clock::Global, @@ -212,8 +211,11 @@ struct ByteChunks<'a>(text::Chunks<'a>); pub(crate) struct QueryCursorHandle(Option); impl SyntaxMap { - pub fn new() -> Self { - Self::default() + pub fn new(text: &BufferSnapshot) -> Self { + Self { + snapshot: SyntaxSnapshot::new(text), + language_registry: None, + } } pub fn set_language_registry(&mut self, registry: Arc) { @@ -242,12 +244,21 @@ impl SyntaxMap { self.snapshot = snapshot; } - 
pub fn clear(&mut self) { - self.snapshot = SyntaxSnapshot::default(); + pub fn clear(&mut self, text: &BufferSnapshot) { + self.snapshot = SyntaxSnapshot::new(text); } } impl SyntaxSnapshot { + fn new(text: &BufferSnapshot) -> Self { + Self { + layers: SumTree::new(text), + parsed_version: clock::Global::default(), + interpolated_version: clock::Global::default(), + language_registry_version: 0, + } + } + pub fn is_empty(&self) -> bool { self.layers.is_empty() } @@ -262,10 +273,10 @@ impl SyntaxSnapshot { return; } - let mut layers = SumTree::new(); + let mut layers = SumTree::new(text); let mut first_edit_ix_for_depth = 0; let mut prev_depth = 0; - let mut cursor = self.layers.cursor::(); + let mut cursor = self.layers.cursor::(text); cursor.next(text); 'outer: loop { @@ -388,7 +399,7 @@ impl SyntaxSnapshot { let mut resolved_injection_ranges = Vec::new(); let mut cursor = self .layers - .filter::<_, ()>(|summary| summary.contains_unknown_injections); + .filter::<_, ()>(text, |summary| summary.contains_unknown_injections); cursor.next(text); while let Some(layer) = cursor.item() { let SyntaxLayerContent::Pending { language_name } = &layer.content else { @@ -430,9 +441,9 @@ impl SyntaxSnapshot { log::trace!("reparse. 
invalidated ranges:{:?}", invalidated_ranges); let max_depth = self.layers.summary().max_depth; - let mut cursor = self.layers.cursor::(); + let mut cursor = self.layers.cursor::(text); cursor.next(text); - let mut layers = SumTree::new(); + let mut layers = SumTree::new(text); let mut changed_regions = ChangeRegionSet::default(); let mut queue = BinaryHeap::new(); @@ -823,7 +834,7 @@ impl SyntaxSnapshot { let start = buffer.anchor_before(start_offset); let end = buffer.anchor_after(end_offset); - let mut cursor = self.layers.filter::<_, ()>(move |summary| { + let mut cursor = self.layers.filter::<_, ()>(buffer, move |summary| { if summary.max_depth > summary.min_depth { true } else { @@ -1666,6 +1677,10 @@ impl Default for SyntaxLayerSummary { impl sum_tree::Summary for SyntaxLayerSummary { type Context = BufferSnapshot; + fn zero(_cx: &BufferSnapshot) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { if other.max_depth > self.max_depth { self.max_depth = other.max_depth; diff --git a/crates/language/src/syntax_map/syntax_map_tests.rs b/crates/language/src/syntax_map/syntax_map_tests.rs index 6f42252da56cfd..f6d27bcbd21943 100644 --- a/crates/language/src/syntax_map/syntax_map_tests.rs +++ b/crates/language/src/syntax_map/syntax_map_tests.rs @@ -103,7 +103,7 @@ fn test_syntax_map_layers_for_range(cx: &mut AppContext) { .unindent(), ); - let mut syntax_map = SyntaxMap::new(); + let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); syntax_map.reparse(language.clone(), &buffer); @@ -202,7 +202,7 @@ fn test_dynamic_language_injection(cx: &mut AppContext) { .unindent(), ); - let mut syntax_map = SyntaxMap::new(); + let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); syntax_map.reparse(markdown.clone(), &buffer); syntax_map.reparse(markdown_inline.clone(), &buffer); @@ -897,11 +897,11 @@ fn test_random_edits( let mut buffer = 
Buffer::new(0, BufferId::new(1).unwrap(), text); - let mut syntax_map = SyntaxMap::new(); + let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); syntax_map.reparse(language.clone(), &buffer); - let mut reference_syntax_map = SyntaxMap::new(); + let mut reference_syntax_map = SyntaxMap::new(&buffer); reference_syntax_map.set_language_registry(registry.clone()); log::info!("initial text:\n{}", buffer.text()); @@ -918,7 +918,7 @@ fn test_random_edits( syntax_map.reparse(language.clone(), &buffer); - reference_syntax_map.clear(); + reference_syntax_map.clear(&buffer); reference_syntax_map.reparse(language.clone(), &buffer); } @@ -931,7 +931,7 @@ fn test_random_edits( syntax_map.interpolate(&buffer); syntax_map.reparse(language.clone(), &buffer); - reference_syntax_map.clear(); + reference_syntax_map.clear(&buffer); reference_syntax_map.reparse(language.clone(), &buffer); assert_eq!( syntax_map.layers(&buffer).len(), @@ -1082,7 +1082,7 @@ fn test_edit_sequence( .unwrap(); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), Default::default()); - let mut mutated_syntax_map = SyntaxMap::new(); + let mut mutated_syntax_map = SyntaxMap::new(&buffer); mutated_syntax_map.set_language_registry(registry.clone()); mutated_syntax_map.reparse(language.clone(), &buffer); @@ -1097,7 +1097,7 @@ fn test_edit_sequence( // Create a second syntax map from scratch log::info!("fresh parse {i}: {marked_string:?}"); - let mut reference_syntax_map = SyntaxMap::new(); + let mut reference_syntax_map = SyntaxMap::new(&buffer); reference_syntax_map.set_language_registry(registry.clone()); reference_syntax_map.reparse(language.clone(), &buffer); diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 5b6eddd5b16bf1..9dee092dea9f29 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -561,7 +561,7 @@ impl MultiBuffer { } let mut buffer_edits: HashMap> = 
Default::default(); let mut edited_excerpt_ids = Vec::new(); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); for (ix, (range, new_text)) in edits.enumerate() { let new_text: Arc = new_text.into(); let original_indent_column = original_indent_columns.get(ix).copied().unwrap_or(0); @@ -841,7 +841,7 @@ impl MultiBuffer { let mut ranges = Vec::new(); let snapshot = self.read(cx); let buffers = self.buffers.borrow(); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); for (buffer_id, buffer_transaction) in &transaction.buffer_transactions { let Some(buffer_state) = buffers.get(buffer_id) else { @@ -957,7 +957,7 @@ impl MultiBuffer { let mut selections_by_buffer: HashMap>> = Default::default(); let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::>(); + let mut cursor = snapshot.excerpts.cursor::>(&()); for selection in selections { let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id); let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id); @@ -1281,7 +1281,7 @@ impl MultiBuffer { let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone(); let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids); - let mut cursor = snapshot.excerpts.cursor::>(); + let mut cursor = snapshot.excerpts.cursor::>(&()); let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right, &()); prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone(); @@ -1388,7 +1388,7 @@ impl MultiBuffer { let mut excerpts = Vec::new(); let snapshot = self.read(cx); let buffers = self.buffers.borrow(); - let mut cursor = snapshot.excerpts.cursor::>(); + let mut cursor = snapshot.excerpts.cursor::>(&()); for locator in buffers .get(&buffer.read(cx).remote_id()) .map(|state| &state.excerpts) @@ -1432,7 +1432,7 @@ impl MultiBuffer { let snapshot = self.read(cx); let position = position.to_offset(&snapshot); - 
let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); cursor.seek(&position, Bias::Right, &()); cursor .item() @@ -1459,7 +1459,7 @@ impl MultiBuffer { ) -> Option<(Model, usize, ExcerptId)> { let snapshot = self.read(cx); let offset = point.to_offset(&snapshot); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -1482,7 +1482,7 @@ impl MultiBuffer { ) -> Option<(Model, Point, ExcerptId)> { let snapshot = self.read(cx); let point = point.to_point(&snapshot); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -1507,7 +1507,7 @@ impl MultiBuffer { let end = range.end.to_offset(&snapshot); let mut result = Vec::new(); - let mut cursor = snapshot.excerpts.cursor::(); + let mut cursor = snapshot.excerpts.cursor::(&()); cursor.seek(&start, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -1546,8 +1546,8 @@ impl MultiBuffer { let mut buffers = self.buffers.borrow_mut(); let mut snapshot = self.snapshot.borrow_mut(); - let mut new_excerpts = SumTree::new(); - let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(); + let mut new_excerpts = SumTree::default(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(&()); let mut edits = Vec::new(); let mut excerpt_ids = ids.iter().copied().peekable(); @@ -1801,8 +1801,8 @@ impl MultiBuffer { let ids = ids.into_iter().collect::>(); let snapshot = self.snapshot(cx); let locators = snapshot.excerpt_locators_for_ids(ids.iter().copied()); - let mut new_excerpts = SumTree::new(); - let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(); + let mut new_excerpts = SumTree::default(); + let mut cursor = 
snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(&()); let mut edits = Vec::>::new(); for locator in &locators { @@ -1927,8 +1927,8 @@ impl MultiBuffer { excerpts_to_edit.sort_unstable_by_key(|(locator, _, _)| *locator); let mut edits = Vec::new(); - let mut new_excerpts = SumTree::new(); - let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(); + let mut new_excerpts = SumTree::default(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(&()); for (locator, buffer, buffer_edited) in excerpts_to_edit { new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &()); @@ -2230,7 +2230,7 @@ impl MultiBufferSnapshot { pub fn reversed_chars_at(&self, position: T) -> impl Iterator + '_ { let mut offset = position.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Left, &()); let mut excerpt_chunks = cursor.item().map(|excerpt| { let end_before_footer = cursor.start() + excerpt.text_summary.len; @@ -2357,7 +2357,7 @@ impl MultiBufferSnapshot { return buffer.clip_offset(offset, bias); } - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); let overshoot = if let Some(excerpt) = cursor.item() { let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); @@ -2376,7 +2376,7 @@ impl MultiBufferSnapshot { return buffer.clip_point(point, bias); } - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&point, Bias::Right, &()); let overshoot = if let Some(excerpt) = cursor.item() { let excerpt_start = excerpt.range.context.start.to_point(&excerpt.buffer); @@ -2395,7 +2395,7 @@ impl MultiBufferSnapshot { return buffer.clip_offset_utf16(offset, bias); } - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); let overshoot = if 
let Some(excerpt) = cursor.item() { let excerpt_start = excerpt.range.context.start.to_offset_utf16(&excerpt.buffer); @@ -2414,7 +2414,7 @@ impl MultiBufferSnapshot { return buffer.clip_point_utf16(point, bias); } - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&point.0, Bias::Right, &()); let overshoot = if let Some(excerpt) = cursor.item() { let excerpt_start = excerpt @@ -2432,7 +2432,7 @@ impl MultiBufferSnapshot { pub fn bytes_in_range(&self, range: Range) -> MultiBufferBytes { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpts = self.excerpts.cursor::(); + let mut excerpts = self.excerpts.cursor::(&()); excerpts.seek(&range.start, Bias::Right, &()); let mut chunk = &[][..]; @@ -2457,7 +2457,7 @@ impl MultiBufferSnapshot { range: Range, ) -> ReversedMultiBufferBytes { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpts = self.excerpts.cursor::(); + let mut excerpts = self.excerpts.cursor::(&()); excerpts.seek(&range.end, Bias::Left, &()); let mut chunk = &[][..]; @@ -2482,7 +2482,7 @@ impl MultiBufferSnapshot { pub fn buffer_rows(&self, start_row: MultiBufferRow) -> MultiBufferRows { let mut result = MultiBufferRows { buffer_row_range: 0..0, - excerpts: self.excerpts.cursor(), + excerpts: self.excerpts.cursor(&()), }; result.seek(start_row); result @@ -2492,7 +2492,7 @@ impl MultiBufferSnapshot { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut chunks = MultiBufferChunks { range: range.clone(), - excerpts: self.excerpts.cursor(), + excerpts: self.excerpts.cursor(&()), excerpt_chunks: None, language_aware, }; @@ -2505,7 +2505,7 @@ impl MultiBufferSnapshot { return buffer.offset_to_point(offset); } - let mut cursor = self.excerpts.cursor::<(usize, Point)>(); + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let 
(start_offset, start_point) = cursor.start(); @@ -2526,7 +2526,7 @@ impl MultiBufferSnapshot { return buffer.offset_to_point_utf16(offset); } - let mut cursor = self.excerpts.cursor::<(usize, PointUtf16)>(); + let mut cursor = self.excerpts.cursor::<(usize, PointUtf16)>(&()); cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_offset, start_point) = cursor.start(); @@ -2547,7 +2547,7 @@ impl MultiBufferSnapshot { return buffer.point_to_point_utf16(point); } - let mut cursor = self.excerpts.cursor::<(Point, PointUtf16)>(); + let mut cursor = self.excerpts.cursor::<(Point, PointUtf16)>(&()); cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_offset, start_point) = cursor.start(); @@ -2569,7 +2569,7 @@ impl MultiBufferSnapshot { return buffer.point_to_offset(point); } - let mut cursor = self.excerpts.cursor::<(Point, usize)>(); + let mut cursor = self.excerpts.cursor::<(Point, usize)>(&()); cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_point, start_offset) = cursor.start(); @@ -2590,7 +2590,7 @@ impl MultiBufferSnapshot { return buffer.offset_utf16_to_offset(offset_utf16); } - let mut cursor = self.excerpts.cursor::<(OffsetUtf16, usize)>(); + let mut cursor = self.excerpts.cursor::<(OffsetUtf16, usize)>(&()); cursor.seek(&offset_utf16, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_offset_utf16, start_offset) = cursor.start(); @@ -2612,7 +2612,7 @@ impl MultiBufferSnapshot { return buffer.offset_to_offset_utf16(offset); } - let mut cursor = self.excerpts.cursor::<(usize, OffsetUtf16)>(); + let mut cursor = self.excerpts.cursor::<(usize, OffsetUtf16)>(&()); cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_offset, start_offset_utf16) = cursor.start(); @@ -2636,7 +2636,7 @@ impl MultiBufferSnapshot { return buffer.point_utf16_to_offset(point); } - let mut cursor = 
self.excerpts.cursor::<(PointUtf16, usize)>(); + let mut cursor = self.excerpts.cursor::<(PointUtf16, usize)>(&()); cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let (start_point, start_offset) = cursor.start(); @@ -2659,7 +2659,7 @@ impl MultiBufferSnapshot { point: T, ) -> Option<(&BufferSnapshot, usize)> { let offset = point.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -2680,7 +2680,7 @@ impl MultiBufferSnapshot { let mut result = BTreeMap::new(); let mut rows_for_excerpt = Vec::new(); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let mut rows = rows.into_iter().peekable(); let mut prev_row = u32::MAX; let mut prev_language_indent_size = IndentSize::default(); @@ -2769,7 +2769,7 @@ impl MultiBufferSnapshot { &self, row: MultiBufferRow, ) -> Option<(&BufferSnapshot, Range)> { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let point = Point::new(row.0, 0); cursor.seek(&point, Bias::Right, &()); if cursor.item().is_none() && *cursor.start() == point { @@ -2803,9 +2803,9 @@ impl MultiBufferSnapshot { D: TextDimension, O: ToOffset, { - let mut summary = D::default(); + let mut summary = D::zero(&()); let mut range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&range.start, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let mut end_before_newline = cursor.end(&()); @@ -2856,7 +2856,7 @@ impl MultiBufferSnapshot { where D: TextDimension + Ord + Sub, { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let locator = self.excerpt_locator_for_id(anchor.excerpt_id); cursor.seek(locator, Bias::Left, &()); @@ -2894,7 +2894,7 @@ impl 
MultiBufferSnapshot { } let mut anchors = anchors.into_iter().peekable(); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let mut summaries = Vec::new(); while let Some(anchor) = anchors.peek() { let excerpt_id = anchor.excerpt_id; @@ -2949,7 +2949,7 @@ impl MultiBufferSnapshot { I: 'a + IntoIterator, { let mut anchors = anchors.into_iter().enumerate().peekable(); - let mut cursor = self.excerpts.cursor::>(); + let mut cursor = self.excerpts.cursor::>(&()); cursor.next(&()); let mut result = Vec::new(); @@ -3064,7 +3064,7 @@ impl MultiBufferSnapshot { }; } - let mut cursor = self.excerpts.cursor::<(usize, Option)>(); + let mut cursor = self.excerpts.cursor::<(usize, Option)>(&()); cursor.seek(&offset, Bias::Right, &()); if cursor.item().is_none() && offset == cursor.start().0 && bias == Bias::Left { cursor.prev(&()); @@ -3099,7 +3099,7 @@ impl MultiBufferSnapshot { text_anchor: text::Anchor, ) -> Option { let locator = self.excerpt_locator_for_id(excerpt_id); - let mut cursor = self.excerpts.cursor::>(); + let mut cursor = self.excerpts.cursor::>(&()); cursor.seek(locator, Bias::Left, &()); if let Some(excerpt) = cursor.item() { if excerpt.id == excerpt_id { @@ -3139,7 +3139,7 @@ impl MultiBufferSnapshot { ) -> impl Iterator + '_ { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&range.start, Bias::Right, &()); cursor.prev(&()); @@ -3183,7 +3183,7 @@ impl MultiBufferSnapshot { }; let bounds = (start, end); - let mut cursor = self.excerpts.cursor::<(usize, Point)>(); + let mut cursor = self.excerpts.cursor::<(usize, Point)>(&()); cursor.seek(&start_offset, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -3550,7 +3550,7 @@ impl MultiBufferSnapshot { &self, row_range: Range, ) -> impl Iterator> + '_ { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = 
self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.end.0, 0), Bias::Left, &()); if cursor.item().is_none() { @@ -3617,7 +3617,7 @@ impl MultiBufferSnapshot { &self, row_range: Range, ) -> impl Iterator> + '_ { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.start.0, 0), Bias::Left, &()); @@ -3779,7 +3779,7 @@ impl MultiBufferSnapshot { } else if id == ExcerptId::max() { Locator::max_ref() } else { - let mut cursor = self.excerpt_ids.cursor::(); + let mut cursor = self.excerpt_ids.cursor::(&()); cursor.seek(&id, Bias::Left, &()); if let Some(entry) = cursor.item() { if entry.id == id { @@ -3814,7 +3814,7 @@ impl MultiBufferSnapshot { } } - let mut cursor = self.excerpt_ids.cursor::(); + let mut cursor = self.excerpt_ids.cursor::(&()); for id in sorted_ids { if cursor.seek_forward(&id, Bias::Left, &()) { locators.push(cursor.item().unwrap().locator.clone()); @@ -3839,7 +3839,7 @@ impl MultiBufferSnapshot { &'a self, excerpt_id: ExcerptId, ) -> Option> { - let mut cursor = self.excerpts.cursor::<(Option<&Locator>, T)>(); + let mut cursor = self.excerpts.cursor::<(Option<&Locator>, T)>(&()); let locator = self.excerpt_locator_for_id(excerpt_id); if cursor.seek(&Some(locator), Bias::Left, &()) { let start = cursor.start().1.clone(); @@ -3851,7 +3851,7 @@ impl MultiBufferSnapshot { } fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { - let mut cursor = self.excerpts.cursor::>(); + let mut cursor = self.excerpts.cursor::>(&()); let locator = self.excerpt_locator_for_id(excerpt_id); cursor.seek(&Some(locator), Bias::Left, &()); if let Some(excerpt) = cursor.item() { @@ -3866,7 +3866,7 @@ impl MultiBufferSnapshot { pub fn excerpt_containing(&self, range: Range) -> Option { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&range.start, Bias::Right, 
&()); let start_excerpt = cursor.item()?; @@ -3891,7 +3891,7 @@ impl MultiBufferSnapshot { I: IntoIterator> + 'a, { let mut ranges = ranges.into_iter().map(|range| range.to_offset(self)); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.next(&()); let mut current_range = ranges.next(); iter::from_fn(move || { @@ -3943,7 +3943,7 @@ impl MultiBufferSnapshot { ranges: impl IntoIterator>, ) -> impl Iterator)> { let mut ranges = ranges.into_iter().map(|range| range.to_offset(self)); - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); cursor.next(&()); let mut current_range = ranges.next(); iter::from_fn(move || { @@ -3980,7 +3980,7 @@ impl MultiBufferSnapshot { range: &'a Range, include_local: bool, ) -> impl 'a + Iterator)> { - let mut cursor = self.excerpts.cursor::(); + let mut cursor = self.excerpts.cursor::(&()); let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id); let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id); cursor.seek(start_locator, Bias::Left, &()); @@ -4519,6 +4519,10 @@ impl sum_tree::KeyedItem for ExcerptIdMapping { impl sum_tree::Summary for ExcerptId { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { *self = *other; } @@ -4527,6 +4531,10 @@ impl sum_tree::Summary for ExcerptId { impl sum_tree::Summary for ExcerptSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { debug_assert!(summary.excerpt_locator > self.excerpt_locator); self.excerpt_locator = summary.excerpt_locator.clone(); @@ -4536,12 +4544,20 @@ impl sum_tree::Summary for ExcerptSummary { } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for TextSummary { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += &summary.text; } } 
impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for usize { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.len; } @@ -4566,30 +4582,50 @@ impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for Locator { } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for OffsetUtf16 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.len_utf16; } } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Point { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.lines; } } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for PointUtf16 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.lines_utf16() } } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a Locator> { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self = Some(&summary.excerpt_locator); } } impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self = Some(summary.excerpt_id); } diff --git a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index e01b99d472223f..48fcb5dfbb830a 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -137,13 +137,12 @@ impl NotificationStore { return None; } let ix = count - 1 - ix; - let mut cursor = self.notifications.cursor::(); + let mut cursor = self.notifications.cursor::(&()); cursor.seek(&Count(ix), Bias::Right, &()); cursor.item() } - pub fn notification_for_id(&self, id: 
u64) -> Option<&NotificationEntry> { - let mut cursor = self.notifications.cursor::(); + let mut cursor = self.notifications.cursor::(&()); cursor.seek(&NotificationId(id), Bias::Left, &()); if let Some(item) = cursor.item() { if item.id == id { @@ -372,8 +371,8 @@ impl NotificationStore { is_new: bool, cx: &mut ModelContext<'_, NotificationStore>, ) { - let mut cursor = self.notifications.cursor::<(NotificationId, Count)>(); - let mut new_notifications = SumTree::new(); + let mut cursor = self.notifications.cursor::<(NotificationId, Count)>(&()); + let mut new_notifications = SumTree::default(); let mut old_range = 0..0; for (i, (id, new_notification)) in notifications.into_iter().enumerate() { @@ -468,6 +467,10 @@ impl sum_tree::Item for NotificationEntry { impl sum_tree::Summary for NotificationSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.max_id = self.max_id.max(summary.max_id); self.count += summary.count; @@ -476,6 +479,10 @@ impl sum_tree::Summary for NotificationSummary { } impl<'a> sum_tree::Dimension<'a, NotificationSummary> for NotificationId { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &NotificationSummary, _: &()) { debug_assert!(summary.max_id > self.0); self.0 = summary.max_id; @@ -483,6 +490,10 @@ impl<'a> sum_tree::Dimension<'a, NotificationSummary> for NotificationId { } impl<'a> sum_tree::Dimension<'a, NotificationSummary> for Count { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &NotificationSummary, _: &()) { self.0 += summary.count; } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 35eb20259c1393..24852afd702e18 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -2753,7 +2753,7 @@ impl LspStore { if let Some(language) = buffer.language().cloned() { for adapter in 
self.languages.lsp_adapters(&language.name()) { if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) { - buffer.update_diagnostics(*server_id, Default::default(), cx); + buffer.update_diagnostics(*server_id, DiagnosticSet::new([], buffer), cx); } } } @@ -5149,7 +5149,11 @@ impl LspStore { self.buffer_store.update(cx, |buffer_store, cx| { for buffer in buffer_store.buffers() { buffer.update(cx, |buffer, cx| { - buffer.update_diagnostics(server_id, Default::default(), cx); + buffer.update_diagnostics( + server_id, + DiagnosticSet::new([], buffer), + cx, + ); }); } }); diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 00ffbfa9ebcde7..56fe7fc0544471 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -36,7 +36,7 @@ impl Rope { } pub fn append(&mut self, rope: Rope) { - let mut chunks = rope.chunks.cursor::<()>(); + let mut chunks = rope.chunks.cursor::<()>(&()); chunks.next(&()); if let Some(chunk) = chunks.item() { if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE) @@ -175,7 +175,7 @@ impl Rope { { // Ensure all chunks except maybe the last one are not underflowing. // Allow some wiggle room for multibyte characters at chunk boundaries. 
- let mut chunks = self.chunks.cursor::<()>().peekable(); + let mut chunks = self.chunks.cursor::<()>(&()).peekable(); while let Some(chunk) = chunks.next() { if chunks.peek().is_some() { assert!(chunk.0.len() + 3 >= CHUNK_BASE); @@ -245,7 +245,7 @@ impl Rope { if offset >= self.summary().len { return self.summary().len_utf16; } - let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>(); + let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>(&()); cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 @@ -258,7 +258,7 @@ impl Rope { if offset >= self.summary().len_utf16 { return self.summary().len; } - let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>(); + let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>(&()); cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 @@ -271,7 +271,7 @@ impl Rope { if offset >= self.summary().len { return self.summary().lines; } - let mut cursor = self.chunks.cursor::<(usize, Point)>(); + let mut cursor = self.chunks.cursor::<(usize, Point)>(&()); cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 @@ -284,7 +284,7 @@ impl Rope { if offset >= self.summary().len { return self.summary().lines_utf16(); } - let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(); + let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(&()); cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().0; cursor.start().1 @@ -297,7 +297,7 @@ impl Rope { if point >= self.summary().lines { return self.summary().lines_utf16(); } - let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(); + let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(&()); cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().0; cursor.start().1 @@ -310,7 +310,7 @@ impl Rope { if point >= self.summary().lines { return self.summary().len; } - let mut cursor = 
self.chunks.cursor::<(Point, usize)>(); + let mut cursor = self.chunks.cursor::<(Point, usize)>(&()); cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().0; cursor.start().1 @@ -331,7 +331,7 @@ impl Rope { if point >= self.summary().lines_utf16() { return self.summary().len; } - let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(); + let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(&()); cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().0; cursor.start().1 @@ -344,7 +344,7 @@ impl Rope { if point.0 >= self.summary().lines_utf16() { return self.summary().lines; } - let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(); + let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(&()); cursor.seek(&point.0, Bias::Left, &()); let overshoot = Unclipped(point.0 - cursor.start().0); cursor.start().1 @@ -354,7 +354,7 @@ impl Rope { } pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize { - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(&()); cursor.seek(&offset, Bias::Left, &()); if let Some(chunk) = cursor.item() { let mut ix = offset - cursor.start(); @@ -377,7 +377,7 @@ impl Rope { } pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 { - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(&()); cursor.seek(&offset, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = offset - cursor.start(); @@ -388,7 +388,7 @@ impl Rope { } pub fn clip_point(&self, point: Point, bias: Bias) -> Point { - let mut cursor = self.chunks.cursor::(); + let mut cursor = self.chunks.cursor::(&()); cursor.seek(&point, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = point - cursor.start(); @@ -399,7 +399,7 @@ impl Rope { } pub fn clip_point_utf16(&self, point: Unclipped, bias: Bias) -> PointUtf16 { - let mut cursor = self.chunks.cursor::(); + let mut cursor = 
self.chunks.cursor::(&()); cursor.seek(&point.0, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = Unclipped(point.0 - cursor.start()); @@ -472,7 +472,7 @@ pub struct Cursor<'a> { impl<'a> Cursor<'a> { pub fn new(rope: &'a Rope, offset: usize) -> Self { - let mut chunks = rope.chunks.cursor(); + let mut chunks = rope.chunks.cursor(&()); chunks.seek(&offset, Bias::Right, &()); Self { rope, @@ -521,7 +521,7 @@ impl<'a> Cursor<'a> { pub fn summary(&mut self, end_offset: usize) -> D { debug_assert!(end_offset >= self.offset); - let mut summary = D::default(); + let mut summary = D::zero(&()); if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); @@ -563,7 +563,7 @@ pub struct Chunks<'a> { impl<'a> Chunks<'a> { pub fn new(rope: &'a Rope, range: Range, reversed: bool) -> Self { - let mut chunks = rope.chunks.cursor(); + let mut chunks = rope.chunks.cursor(&()); let offset = if reversed { chunks.seek(&range.end, Bias::Left, &()); range.end @@ -774,7 +774,7 @@ pub struct Bytes<'a> { impl<'a> Bytes<'a> { pub fn new(rope: &'a Rope, range: Range, reversed: bool) -> Self { - let mut chunks = rope.chunks.cursor(); + let mut chunks = rope.chunks.cursor(&()); if reversed { chunks.seek(&range.end, Bias::Left, &()); } else { @@ -1180,6 +1180,10 @@ impl<'a> From<&'a str> for ChunkSummary { impl sum_tree::Summary for ChunkSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.text += &summary.text; } @@ -1263,6 +1267,10 @@ impl<'a> From<&'a str> for TextSummary { impl sum_tree::Summary for TextSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &Self::Context) { *self += summary; } @@ -1333,6 +1341,10 @@ impl TextDimension for (D1, D2) { } impl<'a> sum_tree::Dimension<'a, 
ChunkSummary> for TextSummary { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += &summary.text; } @@ -1349,6 +1361,10 @@ impl TextDimension for TextSummary { } impl<'a> sum_tree::Dimension<'a, ChunkSummary> for usize { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += summary.text.len; } @@ -1365,6 +1381,10 @@ impl TextDimension for usize { } impl<'a> sum_tree::Dimension<'a, ChunkSummary> for OffsetUtf16 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += summary.text.len_utf16; } @@ -1381,6 +1401,10 @@ impl TextDimension for OffsetUtf16 { } impl<'a> sum_tree::Dimension<'a, ChunkSummary> for Point { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += summary.text.lines; } @@ -1397,6 +1421,10 @@ impl TextDimension for Point { } impl<'a> sum_tree::Dimension<'a, ChunkSummary> for PointUtf16 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { *self += summary.text.lines_utf16(); } @@ -1890,7 +1918,7 @@ mod tests { impl Rope { fn text(&self) -> String { let mut text = String::new(); - for chunk in self.chunks.cursor::<()>() { + for chunk in self.chunks.cursor::<()>(&()) { text.push_str(&chunk.0); } text diff --git a/crates/rope/src/unclipped.rs b/crates/rope/src/unclipped.rs index 937cbca0534d0e..b3427e2cb98bb2 100644 --- a/crates/rope/src/unclipped.rs +++ b/crates/rope/src/unclipped.rs @@ -13,6 +13,10 @@ impl From for Unclipped { impl<'a, T: sum_tree::Dimension<'a, ChunkSummary>> sum_tree::Dimension<'a, ChunkSummary> for Unclipped { + fn zero(_: &()) -> Self { + Self(T::zero(&())) + } + fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) { self.0.add_summary(summary, &()); } diff 
--git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 452930f942bb97..6da43a8de5ce39 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -28,21 +28,21 @@ where T: Item, D: Dimension<'a, T::Summary>, { - pub fn new(tree: &'a SumTree) -> Self { + pub fn new(tree: &'a SumTree, cx: &::Context) -> Self { Self { tree, stack: ArrayVec::new(), - position: D::default(), + position: D::zero(cx), did_seek: false, at_end: tree.is_empty(), } } - fn reset(&mut self) { + fn reset(&mut self, cx: &::Context) { self.did_seek = false; self.at_end = self.tree.is_empty(); self.stack.truncate(0); - self.position = D::default(); + self.position = D::zero(cx); } pub fn start(&self) -> &D { @@ -192,7 +192,7 @@ where } if self.at_end { - self.position = D::default(); + self.position = D::zero(cx); self.at_end = self.tree.is_empty(); if !self.tree.is_empty() { self.stack.push(StackEntry { @@ -208,7 +208,7 @@ where if let Some(StackEntry { position, .. }) = self.stack.iter().rev().nth(1) { self.position = position.clone(); } else { - self.position = D::default(); + self.position = D::zero(cx); } let entry = self.stack.last_mut().unwrap(); @@ -232,7 +232,7 @@ where if descending { let tree = &child_trees[entry.index]; self.stack.push(StackEntry { - position: D::default(), + position: D::zero(cx), tree, index: tree.0.child_summaries().len() - 1, }) @@ -264,7 +264,7 @@ where self.stack.push(StackEntry { tree: self.tree, index: 0, - position: D::default(), + position: D::zero(cx), }); descend = true; } @@ -364,7 +364,7 @@ where where Target: SeekTarget<'a, T::Summary, D>, { - self.reset(); + self.reset(cx); self.seek_internal(pos, bias, &mut (), cx) } @@ -392,10 +392,10 @@ where Target: SeekTarget<'a, T::Summary, D>, { let mut slice = SliceSeekAggregate { - tree: SumTree::new(), + tree: SumTree::new(cx), leaf_items: ArrayVec::new(), leaf_item_summaries: ArrayVec::new(), - leaf_summary: T::Summary::default(), + leaf_summary: ::zero(cx), }; 
self.seek_internal(end, bias, &mut slice, cx); slice.tree @@ -417,7 +417,7 @@ where Target: SeekTarget<'a, T::Summary, D>, Output: Dimension<'a, T::Summary>, { - let mut summary = SummarySeekAggregate(Output::default()); + let mut summary = SummarySeekAggregate(Output::zero(cx)); self.seek_internal(end, bias, &mut summary, cx); summary.0 } @@ -443,7 +443,7 @@ where self.stack.push(StackEntry { tree: self.tree, index: 0, - position: Default::default(), + position: D::zero(cx), }); } @@ -633,8 +633,12 @@ where T: Item, D: Dimension<'a, T::Summary>, { - pub fn new(tree: &'a SumTree, filter_node: F) -> Self { - let cursor = tree.cursor::(); + pub fn new( + tree: &'a SumTree, + cx: &::Context, + filter_node: F, + ) -> Self { + let cursor = tree.cursor::(cx); Self { cursor, filter_node, @@ -727,7 +731,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate { fn end_leaf(&mut self, cx: &::Context) { self.tree.append( SumTree(Arc::new(Node::Leaf { - summary: mem::take(&mut self.leaf_summary), + summary: mem::replace(&mut self.leaf_summary, ::zero(cx)), items: mem::take(&mut self.leaf_items), item_summaries: mem::take(&mut self.leaf_item_summaries), })), diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 8a8027408f3014..ca351d67cea76f 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -34,9 +34,11 @@ pub trait KeyedItem: Item { /// /// Each Summary type can have multiple [`Dimensions`] that it measures, /// which can be used to navigate the tree -pub trait Summary: Default + Clone + fmt::Debug { +pub trait Summary: Clone + fmt::Debug { type Context; + fn zero(cx: &Self::Context) -> Self; + fn add_summary(&mut self, summary: &Self, cx: &Self::Context); } @@ -47,17 +49,23 @@ pub trait Summary: Default + Clone + fmt::Debug { /// # Example: /// Zed's rope has a `TextSummary` type that summarizes lines, characters, and bytes. 
/// Each of these are different dimensions we may want to seek to -pub trait Dimension<'a, S: Summary>: Clone + fmt::Debug + Default { - fn add_summary(&mut self, _summary: &'a S, _: &S::Context); +pub trait Dimension<'a, S: Summary>: Clone + fmt::Debug { + fn zero(cx: &S::Context) -> Self; + + fn add_summary(&mut self, summary: &'a S, cx: &S::Context); fn from_summary(summary: &'a S, cx: &S::Context) -> Self { - let mut dimension = Self::default(); + let mut dimension = Self::zero(cx); dimension.add_summary(summary, cx); dimension } } impl<'a, T: Summary> Dimension<'a, T> for T { + fn zero(cx: &T::Context) -> Self { + Summary::zero(cx) + } + fn add_summary(&mut self, summary: &'a T, cx: &T::Context) { Summary::add_summary(self, summary, cx); } @@ -74,10 +82,18 @@ impl<'a, S: Summary, D: Dimension<'a, S> + Ord> SeekTarget<'a, S, D> for D { } impl<'a, T: Summary> Dimension<'a, T> for () { + fn zero(_: &T::Context) -> Self { + () + } + fn add_summary(&mut self, _: &'a T, _: &T::Context) {} } impl<'a, T: Summary, D1: Dimension<'a, T>, D2: Dimension<'a, T>> Dimension<'a, T> for (D1, D2) { + fn zero(cx: &T::Context) -> Self { + (D1::zero(cx), D2::zero(cx)) + } + fn add_summary(&mut self, summary: &'a T, cx: &T::Context) { self.0.add_summary(summary, cx); self.1.add_summary(summary, cx); @@ -161,16 +177,16 @@ impl Bias { pub struct SumTree(Arc>); impl SumTree { - pub fn new() -> Self { + pub fn new(cx: &::Context) -> Self { SumTree(Arc::new(Node::Leaf { - summary: T::Summary::default(), + summary: ::zero(cx), items: ArrayVec::new(), item_summaries: ArrayVec::new(), })) } pub fn from_item(item: T, cx: &::Context) -> Self { - let mut tree = Self::new(); + let mut tree = Self::new(cx); tree.push(item, cx); tree } @@ -206,7 +222,7 @@ impl SumTree { let mut current_parent_node = None; for child_node in nodes.drain(..) 
{ let parent_node = current_parent_node.get_or_insert_with(|| Node::Internal { - summary: T::Summary::default(), + summary: ::zero(cx), height, child_summaries: ArrayVec::new(), child_trees: ArrayVec::new(), @@ -234,7 +250,7 @@ impl SumTree { } if nodes.is_empty() { - Self::new() + Self::new(cx) } else { debug_assert_eq!(nodes.len(), 1); Self(Arc::new(nodes.pop().unwrap())) @@ -296,7 +312,7 @@ impl SumTree { } if nodes.is_empty() { - Self::new() + Self::new(cx) } else { debug_assert_eq!(nodes.len(), 1); nodes.pop().unwrap() @@ -306,7 +322,7 @@ impl SumTree { #[allow(unused)] pub fn items(&self, cx: &::Context) -> Vec { let mut items = Vec::new(); - let mut cursor = self.cursor::<()>(); + let mut cursor = self.cursor::<()>(cx); cursor.next(cx); while let Some(item) = cursor.item() { items.push(item.clone()); @@ -319,21 +335,25 @@ impl SumTree { Iter::new(self) } - pub fn cursor<'a, S>(&'a self) -> Cursor + pub fn cursor<'a, S>(&'a self, cx: &::Context) -> Cursor where S: Dimension<'a, T::Summary>, { - Cursor::new(self) + Cursor::new(self, cx) } /// Note: If the summary type requires a non `()` context, then the filter cursor /// that is returned cannot be used with Rust's iterators. - pub fn filter<'a, F, U>(&'a self, filter_node: F) -> FilterCursor + pub fn filter<'a, F, U>( + &'a self, + cx: &::Context, + filter_node: F, + ) -> FilterCursor where F: FnMut(&T::Summary) -> bool, U: Dimension<'a, T::Summary>, { - FilterCursor::new(self, filter_node) + FilterCursor::new(self, cx, filter_node) } #[allow(dead_code)] @@ -389,7 +409,7 @@ impl SumTree { &'a self, cx: &::Context, ) -> D { - let mut extent = D::default(); + let mut extent = D::zero(cx); match self.0.as_ref() { Node::Internal { summary, .. } | Node::Leaf { summary, .. 
} => { extent.add_summary(summary, cx); @@ -636,7 +656,7 @@ impl SumTree { ) -> Option { let mut replaced = None; *self = { - let mut cursor = self.cursor::(); + let mut cursor = self.cursor::(cx); let mut new_tree = cursor.slice(&item.key(), Bias::Left, cx); if let Some(cursor_item) = cursor.item() { if cursor_item.key() == item.key() { @@ -654,7 +674,7 @@ impl SumTree { pub fn remove(&mut self, key: &T::Key, cx: &::Context) -> Option { let mut removed = None; *self = { - let mut cursor = self.cursor::(); + let mut cursor = self.cursor::(cx); let mut new_tree = cursor.slice(key, Bias::Left, cx); if let Some(item) = cursor.item() { if item.key() == *key { @@ -681,11 +701,11 @@ impl SumTree { edits.sort_unstable_by_key(|item| item.key()); *self = { - let mut cursor = self.cursor::(); - let mut new_tree = SumTree::new(); + let mut cursor = self.cursor::(cx); + let mut new_tree = SumTree::new(cx); let mut buffered_items = Vec::new(); - cursor.seek(&T::Key::default(), Bias::Left, cx); + cursor.seek(&T::Key::zero(cx), Bias::Left, cx); for edit in edits { let new_key = edit.key(); let mut old_item = cursor.item(); @@ -724,7 +744,7 @@ impl SumTree { } pub fn get(&self, key: &T::Key, cx: &::Context) -> Option<&T> { - let mut cursor = self.cursor::(); + let mut cursor = self.cursor::(cx); if cursor.seek(key, Bias::Left, cx) { cursor.item() } else { @@ -733,9 +753,13 @@ impl SumTree { } } -impl Default for SumTree { +impl Default for SumTree +where + T: Item, + S: Summary, +{ fn default() -> Self { - Self::new() + Self::new(&()) } } @@ -824,7 +848,7 @@ where T: 'a + Summary, I: Iterator, { - let mut sum = T::default(); + let mut sum = T::zero(cx); for value in iter { sum.add_summary(value, cx); } @@ -846,10 +870,10 @@ mod tests { #[test] fn test_extend_and_push_tree() { - let mut tree1 = SumTree::new(); + let mut tree1 = SumTree::default(); tree1.extend(0..20, &()); - let mut tree2 = SumTree::new(); + let mut tree2 = SumTree::default(); tree2.extend(50..100, &()); 
tree1.append(tree2, &()); @@ -877,7 +901,7 @@ mod tests { let mut rng = StdRng::seed_from_u64(seed); let rng = &mut rng; - let mut tree = SumTree::::new(); + let mut tree = SumTree::::default(); let count = rng.gen_range(0..10); if rng.gen() { tree.extend(rng.sample_iter(distributions::Standard).take(count), &()); @@ -903,7 +927,7 @@ mod tests { reference_items.splice(splice_start..splice_end, new_items.clone()); tree = { - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &()); if rng.gen() { new_tree.extend(new_items, &()); @@ -918,12 +942,13 @@ mod tests { assert_eq!(tree.items(&()), reference_items); assert_eq!( tree.iter().collect::>(), - tree.cursor::<()>().collect::>() + tree.cursor::<()>(&()).collect::>() ); log::info!("tree items: {:?}", tree.items(&())); - let mut filter_cursor = tree.filter::<_, Count>(|summary| summary.contains_even); + let mut filter_cursor = + tree.filter::<_, Count>(&(), |summary| summary.contains_even); let expected_filtered_items = tree .items(&()) .into_iter() @@ -964,7 +989,7 @@ mod tests { assert_eq!(filter_cursor.item(), None); let mut before_start = false; - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); let start_pos = rng.gen_range(0..=reference_items.len()); cursor.seek(&Count(start_pos), Bias::Right, &()); let mut pos = rng.gen_range(start_pos..=reference_items.len()); @@ -1015,7 +1040,7 @@ mod tests { let start_bias = if rng.gen() { Bias::Left } else { Bias::Right }; let end_bias = if rng.gen() { Bias::Left } else { Bias::Right }; - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); cursor.seek(&Count(start), start_bias, &()); let slice = cursor.slice(&Count(end), end_bias, &()); @@ -1030,8 +1055,8 @@ mod tests { #[test] fn test_cursor() { // Empty tree - let tree = SumTree::::new(); - let mut cursor = tree.cursor::(); + let tree = SumTree::::default(); + let mut cursor = 
tree.cursor::(&()); assert_eq!( cursor.slice(&Count(0), Bias::Right, &()).items(&()), Vec::::new() @@ -1052,9 +1077,9 @@ mod tests { assert_eq!(cursor.start().sum, 0); // Single-element tree - let mut tree = SumTree::::new(); + let mut tree = SumTree::::default(); tree.extend(vec![1], &()); - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); assert_eq!( cursor.slice(&Count(0), Bias::Right, &()).items(&()), Vec::::new() @@ -1076,7 +1101,7 @@ mod tests { assert_eq!(cursor.next_item(), None); assert_eq!(cursor.start().sum, 0); - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); assert_eq!(cursor.slice(&Count(1), Bias::Right, &()).items(&()), [1]); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); @@ -1096,9 +1121,9 @@ mod tests { assert_eq!(cursor.start().sum, 1); // Multiple-element tree - let mut tree = SumTree::new(); + let mut tree = SumTree::default(); tree.extend(vec![1, 2, 3, 4, 5, 6], &()); - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); assert_eq!(cursor.slice(&Count(2), Bias::Right, &()).items(&()), [1, 2]); assert_eq!(cursor.item(), Some(&3)); @@ -1179,7 +1204,7 @@ mod tests { assert_eq!(cursor.next_item(), Some(&2)); assert_eq!(cursor.start().sum, 0); - let mut cursor = tree.cursor::(); + let mut cursor = tree.cursor::(&()); assert_eq!( cursor .slice(&tree.extent::(&()), Bias::Right, &()) @@ -1227,7 +1252,7 @@ mod tests { #[test] fn test_edit() { - let mut tree = SumTree::::new(); + let mut tree = SumTree::::default(); let removed = tree.edit(vec![Edit::Insert(1), Edit::Insert(2), Edit::Insert(0)], &()); assert_eq!(tree.items(&()), vec![0, 1, 2]); @@ -1305,6 +1330,10 @@ mod tests { impl Summary for IntegersSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { self.count += other.count; self.sum += other.sum; @@ -1314,12 +1343,20 @@ mod tests { } impl<'a> Dimension<'a, 
IntegersSummary> for u8 { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &IntegersSummary, _: &()) { *self = summary.max; } } impl<'a> Dimension<'a, IntegersSummary> for Count { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &IntegersSummary, _: &()) { self.0 += summary.count; } @@ -1332,6 +1369,10 @@ mod tests { } impl<'a> Dimension<'a, IntegersSummary> for Sum { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &IntegersSummary, _: &()) { self.0 += summary.sum; } diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 53bb0a807c23df..72465b1a99cabd 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -53,7 +53,7 @@ impl TreeMap { } pub fn get(&self, key: &K) -> Option<&V> { - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &()); if let Some(item) = cursor.item() { if Some(key) == item.key().0.as_ref() { @@ -72,7 +72,7 @@ impl TreeMap { pub fn remove(&mut self, key: &K) -> Option { let mut removed = None; - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let key = MapKeyRef(Some(key)); let mut new_tree = cursor.slice(&key, Bias::Left, &()); if key.cmp(&cursor.end(&()), &()) == Ordering::Equal { @@ -88,7 +88,7 @@ impl TreeMap { pub fn remove_range(&mut self, start: &impl MapSeekTarget, end: &impl MapSeekTarget) { let start = MapSeekTargetAdaptor(start); let end = MapSeekTargetAdaptor(end); - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let mut new_tree = cursor.slice(&start, Bias::Left, &()); cursor.seek(&end, Bias::Left, &()); new_tree.append(cursor.suffix(&()), &()); @@ -98,7 +98,7 @@ impl TreeMap { /// Returns the key-value pair with the greatest key less than or equal to the given key. 
pub fn closest(&self, key: &K) -> Option<(&K, &V)> { - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let key = MapKeyRef(Some(key)); cursor.seek(&key, Bias::Right, &()); cursor.prev(&()); @@ -106,7 +106,7 @@ impl TreeMap { } pub fn iter_from<'a>(&'a self, from: &'a K) -> impl Iterator + '_ { - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let from_key = MapKeyRef(Some(from)); cursor.seek(&from_key, Bias::Left, &()); @@ -117,7 +117,7 @@ impl TreeMap { where F: FnOnce(&mut V) -> T, { - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); let key = MapKeyRef(Some(key)); let mut new_tree = cursor.slice(&key, Bias::Left, &()); let mut result = None; @@ -136,7 +136,7 @@ impl TreeMap { pub fn retain bool>(&mut self, mut predicate: F) { let mut new_map = SumTree::>::default(); - let mut cursor = self.0.cursor::>(); + let mut cursor = self.0.cursor::>(&()); cursor.next(&()); while let Some(item) = cursor.item() { if predicate(&item.key, &item.value) { @@ -247,6 +247,10 @@ where { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { *self = summary.clone() } @@ -256,6 +260,10 @@ impl<'a, K> Dimension<'a, MapKey> for MapKeyRef<'a, K> where K: Clone + Debug + Ord, { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a MapKey, _: &()) { self.0 = summary.0.as_ref(); } diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index ee833326f58099..3bc5889caeb700 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -100,7 +100,7 @@ impl Anchor { false } else { let fragment_id = buffer.fragment_id_for_anchor(self); - let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>(&None); fragment_cursor.seek(&Some(fragment_id), Bias::Left, &None); 
fragment_cursor .item() diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index 83d57016c5d753..7afc16f5815496 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -85,6 +85,10 @@ impl sum_tree::KeyedItem for Locator { impl sum_tree::Summary for Locator { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &()) { self.assign(summary); } diff --git a/crates/text/src/operation_queue.rs b/crates/text/src/operation_queue.rs index 063f05066570f6..c7964f62674ca5 100644 --- a/crates/text/src/operation_queue.rs +++ b/crates/text/src/operation_queue.rs @@ -34,7 +34,7 @@ impl Default for OperationQueue { impl OperationQueue { pub fn new() -> Self { - OperationQueue(SumTree::new()) + OperationQueue(SumTree::default()) } pub fn len(&self) -> usize { @@ -58,7 +58,7 @@ impl OperationQueue { pub fn drain(&mut self) -> Self { let clone = self.clone(); - self.0 = SumTree::new(); + self.0 = SumTree::default(); clone } @@ -70,6 +70,10 @@ impl OperationQueue { impl Summary for OperationSummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &()) { assert!(self.key < other.key); self.key = other.key; @@ -90,6 +94,10 @@ impl<'a> Add<&'a Self> for OperationSummary { } impl<'a> Dimension<'a, OperationSummary> for OperationKey { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &OperationSummary, _: &()) { assert!(*self <= summary.key); *self = summary.key; diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index b17748c6d067aa..9630ec5b80334c 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -492,6 +492,10 @@ struct FragmentTextSummary { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { + fn zero(_: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FragmentSummary, _: 
&Option) { self.visible += summary.text.visible; self.deleted += summary.text.deleted; @@ -654,8 +658,8 @@ impl Buffer { normalized: Rope, ) -> Buffer { let history = History::new(normalized); - let mut fragments = SumTree::new(); - let mut insertions = SumTree::new(); + let mut fragments = SumTree::new(&None); + let mut insertions = SumTree::default(); let mut lamport_clock = clock::Lamport::new(replica_id); let mut version = clock::Global::new(); @@ -772,7 +776,7 @@ impl Buffer { let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - let mut old_fragments = self.fragments.cursor::(); + let mut old_fragments = self.fragments.cursor::(&None); let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None); new_ropes.append(new_fragments.summary().text); @@ -992,7 +996,7 @@ impl Buffer { let mut insertion_offset = 0; let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(); + let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(&cx); let mut new_fragments = old_fragments.slice( &VersionedFullOffset::Offset(ranges[0].start), Bias::Left, @@ -1185,7 +1189,7 @@ impl Buffer { // Get all of the fragments corresponding to these insertion slices. 
let mut fragment_ids = Vec::new(); - let mut insertions_cursor = self.insertions.cursor::(); + let mut insertions_cursor = self.insertions.cursor::(&()); for insertion_slice in &insertion_slices { if insertion_slice.insertion_id != insertions_cursor.start().timestamp || insertion_slice.range.start > insertions_cursor.start().split_offset @@ -1217,8 +1221,8 @@ impl Buffer { self.snapshot.undo_map.insert(undo); let mut edits = Patch::default(); - let mut old_fragments = self.fragments.cursor::<(Option<&Locator>, usize)>(); - let mut new_fragments = SumTree::new(); + let mut old_fragments = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); + let mut new_fragments = SumTree::new(&None); let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); @@ -1455,7 +1459,7 @@ impl Buffer { D: TextDimension, { // get fragment ranges - let mut cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); let offset_ranges = self .fragment_ids_for_edits(transaction.edit_ids.iter()) .into_iter() @@ -1485,7 +1489,7 @@ impl Buffer { }); // convert to the desired text dimension. 
- let mut position = D::default(); + let mut position = D::zero(&()); let mut rope_cursor = self.visible_text.cursor(0); disjoint_ranges.map(move |range| { position.add_assign(&rope_cursor.summary(range.start)); @@ -1665,8 +1669,8 @@ impl Buffer { ); } - let mut cursor = self.snapshot.fragments.cursor::>(); - for insertion_fragment in self.snapshot.insertions.cursor::<()>() { + let mut cursor = self.snapshot.fragments.cursor::>(&None); + for insertion_fragment in self.snapshot.insertions.cursor::<()>(&()) { cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None); let fragment = cursor.item().unwrap(); assert_eq!(insertion_fragment.fragment_id, fragment.id); @@ -1783,7 +1787,7 @@ impl BufferSnapshot { let mut cursor = self .fragments - .filter::<_, FragmentTextSummary>(move |summary| { + .filter::<_, FragmentTextSummary>(&None, move |summary| { !version.observed_all(&summary.max_version) }); cursor.next(&None); @@ -2110,14 +2114,14 @@ impl BufferSnapshot { A: 'a + IntoIterator, { let anchors = anchors.into_iter(); - let mut insertion_cursor = self.insertions.cursor::(); - let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut insertion_cursor = self.insertions.cursor::(&()); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); let mut text_cursor = self.visible_text.cursor(0); - let mut position = D::default(); + let mut position = D::zero(&()); anchors.map(move |(anchor, payload)| { if *anchor == Anchor::MIN { - return (D::default(), payload); + return (D::zero(&()), payload); } else if *anchor == Anchor::MAX { return (D::from_text_summary(&self.visible_text.summary()), payload); } @@ -2159,7 +2163,7 @@ impl BufferSnapshot { D: TextDimension, { if *anchor == Anchor::MIN { - D::default() + D::zero(&()) } else if *anchor == Anchor::MAX { D::from_text_summary(&self.visible_text.summary()) } else { @@ -2167,7 +2171,7 @@ impl BufferSnapshot { timestamp: anchor.timestamp, split_offset: 
anchor.offset, }; - let mut insertion_cursor = self.insertions.cursor::(); + let mut insertion_cursor = self.insertions.cursor::(&()); insertion_cursor.seek(&anchor_key, anchor.bias, &()); if let Some(insertion) = insertion_cursor.item() { let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); @@ -2192,7 +2196,7 @@ impl BufferSnapshot { ); }; - let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(&None); fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); let fragment = fragment_cursor.item().unwrap(); let mut fragment_offset = fragment_cursor.start().1; @@ -2213,7 +2217,7 @@ impl BufferSnapshot { timestamp: anchor.timestamp, split_offset: anchor.offset, }; - let mut insertion_cursor = self.insertions.cursor::(); + let mut insertion_cursor = self.insertions.cursor::(&()); insertion_cursor.seek(&anchor_key, anchor.bias, &()); if let Some(insertion) = insertion_cursor.item() { let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); @@ -2263,7 +2267,7 @@ impl BufferSnapshot { } else if bias == Bias::Right && offset == self.len() { Anchor::MAX } else { - let mut fragment_cursor = self.fragments.cursor::(); + let mut fragment_cursor = self.fragments.cursor::(&None); fragment_cursor.seek(&offset, bias, &None); let fragment = fragment_cursor.item().unwrap(); let overshoot = offset - *fragment_cursor.start(); @@ -2341,15 +2345,15 @@ impl BufferSnapshot { let fragments_cursor = if *since == self.version { None } else { - let mut cursor = self - .fragments - .filter(move |summary| !since.observed_all(&summary.max_version)); + let mut cursor = self.fragments.filter(&None, move |summary| { + !since.observed_all(&summary.max_version) + }); cursor.next(&None); Some(cursor) }; let mut cursor = self .fragments - .cursor::<(Option<&Locator>, FragmentTextSummary)>(); + .cursor::<(Option<&Locator>, FragmentTextSummary)>(&None); 
let start_fragment_id = self.fragment_id_for_anchor(&range.start); cursor.seek(&Some(start_fragment_id), Bias::Left, &None); @@ -2371,8 +2375,8 @@ impl BufferSnapshot { fragments_cursor, undos: &self.undo_map, since, - old_end: Default::default(), - new_end: Default::default(), + old_end: D::zero(&()), + new_end: D::zero(&()), range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset), buffer_id: self.remote_id, } @@ -2382,9 +2386,9 @@ impl BufferSnapshot { if *since != self.version { let start_fragment_id = self.fragment_id_for_anchor(&range.start); let end_fragment_id = self.fragment_id_for_anchor(&range.end); - let mut cursor = self - .fragments - .filter::<_, usize>(move |summary| !since.observed_all(&summary.max_version)); + let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| { + !since.observed_all(&summary.max_version) + }); cursor.next(&None); while let Some(fragment) = cursor.item() { if fragment.id > *end_fragment_id { @@ -2405,9 +2409,9 @@ impl BufferSnapshot { pub fn has_edits_since(&self, since: &clock::Global) -> bool { if *since != self.version { - let mut cursor = self - .fragments - .filter::<_, usize>(move |summary| !since.observed_all(&summary.max_version)); + let mut cursor = self.fragments.filter::<_, usize>(&None, move |summary| { + !since.observed_all(&summary.max_version) + }); cursor.next(&None); while let Some(fragment) = cursor.item() { let was_visible = fragment.was_visible(since, &self.undo_map); @@ -2644,6 +2648,10 @@ impl sum_tree::Item for Fragment { impl sum_tree::Summary for FragmentSummary { type Context = Option; + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, other: &Self, _: &Self::Context) { self.max_id.assign(&other.max_id); self.text.visible += &other.text.visible; @@ -2704,6 +2712,10 @@ impl InsertionFragment { impl sum_tree::Summary for InsertionFragmentKey { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } 
+ fn add_summary(&mut self, summary: &Self, _: &()) { *self = *summary; } @@ -2736,18 +2748,30 @@ impl ops::Sub for FullOffset { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize { + fn zero(_: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { *self += summary.text.visible; } } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { + fn zero(_: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { self.0 += summary.text.visible + summary.text.deleted; } } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> { + fn zero(_: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { *self = Some(&summary.max_id); } @@ -2786,6 +2810,10 @@ impl Default for VersionedFullOffset { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset { + fn zero(_cx: &Option) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { if let Self::Offset(offset) = self { let version = cx.as_ref().unwrap(); diff --git a/crates/text/src/undo_map.rs b/crates/text/src/undo_map.rs index f95809c02e2472..4e670fd456068d 100644 --- a/crates/text/src/undo_map.rs +++ b/crates/text/src/undo_map.rs @@ -33,6 +33,10 @@ struct UndoMapKey { impl sum_tree::Summary for UndoMapKey { type Context = (); + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &Self::Context) { *self = cmp::max(*self, *summary); } @@ -62,9 +66,8 @@ impl UndoMap { pub fn is_undone(&self, edit_id: clock::Lamport) -> bool { self.undo_count(edit_id) % 2 == 1 } - pub fn was_undone(&self, edit_id: clock::Lamport, version: &clock::Global) -> bool { - let mut cursor = self.0.cursor::(); + let mut cursor = self.0.cursor::(&()); cursor.seek( &UndoMapKey { edit_id, @@ -89,7 +92,7 @@ impl UndoMap 
{ } pub fn undo_count(&self, edit_id: clock::Lamport) -> u32 { - let mut cursor = self.0.cursor::(); + let mut cursor = self.0.cursor::(&()); cursor.seek( &UndoMapKey { edit_id, diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 776c01c49c2868..d8555b71a4f67c 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -2049,7 +2049,7 @@ impl Snapshot { fn delete_entry(&mut self, entry_id: ProjectEntryId) -> Option> { let removed_entry = self.entries_by_id.remove(&entry_id, &())?; self.entries_by_path = { - let mut cursor = self.entries_by_path.cursor::(); + let mut cursor = self.entries_by_path.cursor::(&()); let mut new_entries_by_path = cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &()); while let Some(entry) = cursor.item() { @@ -2192,7 +2192,7 @@ impl Snapshot { include_ignored: bool, start_offset: usize, ) -> Traversal { - let mut cursor = self.entries_by_path.cursor(); + let mut cursor = self.entries_by_path.cursor(&()); cursor.seek( &TraversalTarget::Count { count: start_offset, @@ -2302,7 +2302,7 @@ impl Snapshot { pub fn propagate_git_statuses(&self, result: &mut [Entry]) { let mut cursor = self .entries_by_path - .cursor::<(TraversalProgress, GitStatuses)>(); + .cursor::<(TraversalProgress, GitStatuses)>(&()); let mut entry_stack = Vec::<(usize, GitStatuses)>::new(); let mut result_ix = 0; @@ -2358,13 +2358,13 @@ impl Snapshot { pub fn paths(&self) -> impl Iterator> { let empty_path = Path::new(""); self.entries_by_path - .cursor::<()>() + .cursor::<()>(&()) .filter(move |entry| entry.path.as_ref() != empty_path) .map(|entry| &entry.path) } pub fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> { - let mut cursor = self.entries_by_path.cursor(); + let mut cursor = self.entries_by_path.cursor(&()); cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &()); let traversal = Traversal { cursor, @@ -2581,7 +2581,7 @@ impl LocalSnapshot { 
#[cfg(test)] pub(crate) fn expanded_entries(&self) -> impl Iterator { self.entries_by_path - .cursor::<()>() + .cursor::<()>(&()) .filter(|entry| entry.kind == EntryKind::Dir && (entry.is_external || entry.is_ignored)) } @@ -2591,11 +2591,11 @@ impl LocalSnapshot { assert_eq!( self.entries_by_path - .cursor::<()>() + .cursor::<()>(&()) .map(|e| (&e.path, e.id)) .collect::>(), self.entries_by_id - .cursor::<()>() + .cursor::<()>(&()) .map(|e| (&e.path, e.id)) .collect::>() .into_iter() @@ -2605,7 +2605,7 @@ impl LocalSnapshot { let mut files = self.files(true, 0); let mut visible_files = self.files(false, 0); - for entry in self.entries_by_path.cursor::<()>() { + for entry in self.entries_by_path.cursor::<()>(&()) { if entry.is_file() { assert_eq!(files.next().unwrap().inode, entry.inode); if !entry.is_ignored && !entry.is_external { @@ -2633,7 +2633,7 @@ impl LocalSnapshot { let dfs_paths_via_iter = self .entries_by_path - .cursor::<()>() + .cursor::<()>(&()) .map(|e| e.path.as_ref()) .collect::>(); assert_eq!(bfs_paths, dfs_paths_via_iter); @@ -2679,7 +2679,7 @@ impl LocalSnapshot { #[cfg(test)] pub fn entries_without_ids(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> { let mut paths = Vec::new(); - for entry in self.entries_by_path.cursor::<()>() { + for entry in self.entries_by_path.cursor::<()>(&()) { if include_ignored || !entry.is_ignored { paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored)); } @@ -2839,7 +2839,10 @@ impl BackgroundScannerState { let mut new_entries; let removed_entries; { - let mut cursor = self.snapshot.entries_by_path.cursor::(); + let mut cursor = self + .snapshot + .entries_by_path + .cursor::(&()); new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &()); removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &()); new_entries.append(cursor.suffix(&()), &()); @@ -2847,7 +2850,7 @@ impl BackgroundScannerState { self.snapshot.entries_by_path = new_entries; let mut 
removed_ids = Vec::with_capacity(removed_entries.summary().count); - for entry in removed_entries.cursor::<()>() { + for entry in removed_entries.cursor::<()>(&()) { match self.removed_entries.entry(entry.inode) { hash_map::Entry::Occupied(mut e) => { let prev_removed_entry = e.get_mut(); @@ -3403,6 +3406,10 @@ impl Default for EntrySummary { impl sum_tree::Summary for EntrySummary { type Context = (); + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, rhs: &Self, _: &()) { self.max_path = rhs.max_path.clone(); self.count += rhs.count; @@ -3445,12 +3452,20 @@ struct PathEntrySummary { impl sum_tree::Summary for PathEntrySummary { type Context = (); + fn zero(_cx: &Self::Context) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &Self, _: &Self::Context) { self.max_id = summary.max_id; } } impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) { *self = summary.max_id; } @@ -3466,6 +3481,10 @@ impl Default for PathKey { } impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { self.0 = summary.max_path.clone(); } @@ -4629,8 +4648,8 @@ impl BackgroundScanner { // Identify which paths have changed. Use the known set of changed // parent paths to optimize the search. 
let mut changes = Vec::new(); - let mut old_paths = old_snapshot.entries_by_path.cursor::(); - let mut new_paths = new_snapshot.entries_by_path.cursor::(); + let mut old_paths = old_snapshot.entries_by_path.cursor::(&()); + let mut new_paths = new_snapshot.entries_by_path.cursor::(&()); let mut last_newly_loaded_dir_path = None; old_paths.next(&()); new_paths.next(&()); @@ -4981,6 +5000,10 @@ impl<'a> TraversalProgress<'a> { } impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { self.max_path = summary.max_path.as_ref(); self.count += summary.count; @@ -5030,6 +5053,10 @@ impl Sub for GitStatuses { } impl<'a> sum_tree::Dimension<'a, EntrySummary> for GitStatuses { + fn zero(_cx: &()) -> Self { + Default::default() + } + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { *self += summary.statuses } @@ -5050,7 +5077,7 @@ impl<'a> Traversal<'a> { include_ignored: bool, start_path: &Path, ) -> Self { - let mut cursor = entries.cursor(); + let mut cursor = entries.cursor(&()); cursor.seek(&TraversalTarget::Path(start_path), Bias::Left, &()); let mut traversal = Self { cursor, From 8e30229ec974722fd08daac9509eca27a93a00b4 Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Tue, 17 Sep 2024 22:09:59 -0400 Subject: [PATCH 170/762] Fix nix shell (#17982) Recently `cmake` was added as a build-time dependency to the wasm runtime. This adds that dependency to our nix shell env. 
Release Notes: - N/A --- flake.lock | 29 ++++++++++++----------------- flake.nix | 5 +---- nix/shell.nix | 1 + 3 files changed, 14 insertions(+), 21 deletions(-) diff --git a/flake.lock b/flake.lock index f996e1e5776458..2b421a9efb8b99 100644 --- a/flake.lock +++ b/flake.lock @@ -1,17 +1,12 @@ { "nodes": { "crane": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ] - }, "locked": { - "lastModified": 1724537630, - "narHash": "sha256-gpqINM71zp3kw5XYwUXa84ZtPnCmLLnByuFoYesT1bY=", + "lastModified": 1725409566, + "narHash": "sha256-PrtLmqhM6UtJP7v7IGyzjBFhbG4eOAHT6LPYOFmYfbk=", "owner": "ipetkov", "repo": "crane", - "rev": "3e08f4b1fc9aaede5dd511d8f5f4ef27501e49b0", + "rev": "7e4586bad4e3f8f97a9271def747cf58c4b68f3c", "type": "github" }, "original": { @@ -28,11 +23,11 @@ "rust-analyzer-src": "rust-analyzer-src" }, "locked": { - "lastModified": 1724740262, - "narHash": "sha256-cpFasbzOTlwLi4fNas6hDznVUdCJn/lMLxi7MAMG6hg=", + "lastModified": 1726554553, + "narHash": "sha256-xakDhIS1c1VgJc/NMOLj05yBsTdlXKMEYz6wC8Hdshc=", "owner": "nix-community", "repo": "fenix", - "rev": "703efdd9b5c6a7d5824afa348a24fbbf8ff226be", + "rev": "1f59d7585aa06d2c327960d397bea4067d8fee98", "type": "github" }, "original": { @@ -58,11 +53,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1724479785, - "narHash": "sha256-pP3Azj5d6M5nmG68Fu4JqZmdGt4S4vqI5f8te+E/FTw=", + "lastModified": 1726463316, + "narHash": "sha256-gI9kkaH0ZjakJOKrdjaI/VbaMEo9qBbSUl93DnU7f4c=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "d0e1602ddde669d5beb01aec49d71a51937ed7be", + "rev": "99dc8785f6a0adac95f5e2ab05cc2e1bf666d172", "type": "github" }, "original": { @@ -83,11 +78,11 @@ "rust-analyzer-src": { "flake": false, "locked": { - "lastModified": 1724666781, - "narHash": "sha256-nOQDgjTDlWe0/+Ptf3o2p6UrznQFHnXBHRV1ZAsSpe8=", + "lastModified": 1726443025, + "narHash": "sha256-nCmG4NJpwI0IoIlYlwtDwVA49yuspA2E6OhfCOmiArQ=", "owner": "rust-lang", "repo": "rust-analyzer", - "rev": "095926ea6f008477a15a2ec6b0b8797e2e5be0e5", + 
"rev": "94b526fc86eaa0e90fb4d54a5ba6313aa1e9b269", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 23631996abd5ae..7d1410ac7c20b7 100644 --- a/flake.nix +++ b/flake.nix @@ -7,10 +7,7 @@ url = "github:nix-community/fenix"; inputs.nixpkgs.follows = "nixpkgs"; }; - crane = { - url = "github:ipetkov/crane"; - inputs.nixpkgs.follows = "nixpkgs"; - }; + crane.url = "github:ipetkov/crane"; flake-compat.url = "github:edolstra/flake-compat"; }; diff --git a/nix/shell.nix b/nix/shell.nix index 34682ab1d5f8d0..03e298e132f9b0 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -26,6 +26,7 @@ in nativeBuildInputs = with pkgs; [ clang curl + cmake perl pkg-config protobuf From 2699fa8d4a10eb38445e0c5979dfb56a2193f949 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Wed, 18 Sep 2024 11:59:19 +0800 Subject: [PATCH 171/762] windows: Fix `tailwind-language-server` (#17778) Closes #17741 I'm not sure why, but ever since `tailwind` was upgraded to `0.24`, there have been occasional errors indicating that the `.ps1` file could not be found. After reviewing the `.ps1` script, it appears that it simply starts the server using `node`. This PR directly using the method from the script to start the server with `node`. 
Co-authored-by: Anay Release Notes: - N/A --------- Co-authored-by: Anay --- crates/languages/src/tailwind.rs | 34 +++++++------------------------- 1 file changed, 7 insertions(+), 27 deletions(-) diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 524e4ce84611bc..9a053dbd8739ce 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -18,20 +18,15 @@ use std::{ use util::{maybe, ResultExt}; #[cfg(target_os = "windows")] -const SERVER_PATH: &str = "node_modules/.bin/tailwindcss-language-server.ps1"; +const SERVER_PATH: &str = + "node_modules/@tailwindcss/language-server/bin/tailwindcss-language-server"; #[cfg(not(target_os = "windows"))] const SERVER_PATH: &str = "node_modules/.bin/tailwindcss-language-server"; -#[cfg(not(target_os = "windows"))] fn server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] } -#[cfg(target_os = "windows")] -fn server_binary_arguments(server_path: &Path) -> Vec { - vec!["-File".into(), server_path.into(), "--stdio".into()] -} - pub struct TailwindLspAdapter { node: Arc, } @@ -114,26 +109,11 @@ impl LspAdapter for TailwindLspAdapter { .await?; } - #[cfg(target_os = "windows")] - { - let env_path = self.node.node_environment_path().await?; - let mut env = HashMap::default(); - env.insert("PATH".to_string(), env_path.to_string_lossy().to_string()); - - Ok(LanguageServerBinary { - path: "powershell.exe".into(), - env: Some(env), - arguments: server_binary_arguments(&server_path), - }) - } - #[cfg(not(target_os = "windows"))] - { - Ok(LanguageServerBinary { - path: self.node.binary_path().await?, - env: None, - arguments: server_binary_arguments(&server_path), - }) - } + Ok(LanguageServerBinary { + path: self.node.binary_path().await?, + env: None, + arguments: server_binary_arguments(&server_path), + }) } async fn cached_server_binary( From d4e10dfba3bc88c6476605a9efbd7f5c5be45a8a Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 
18 Sep 2024 10:04:02 +0200 Subject: [PATCH 172/762] docs: Update rust-analyzer docs (#17988) Release Notes: - N/A --- docs/src/languages/rust.md | 182 ++++++++++++++++++------------------- 1 file changed, 91 insertions(+), 91 deletions(-) diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 026b522898cf07..233c378dae06d3 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -13,37 +13,29 @@ TBD: Provide explicit examples not just `....` ## Inlay Hints -The following configuration can be used to enable inlay hints for rust: +The following configuration can be used to change the inlay hint settings for `rust-analyzer` in Rust: ```json -"inlayHints": { - "maxLength": null, - "lifetimeElisionHints": { - "useParameterNames": true, - "enable": "skip_trivial" - }, - "closureReturnTypeHints": { - "enable": "always" - } -} -``` - -to make the language server send back inlay hints when Zed has them enabled in the settings. - -Use - -```json -"lsp": { - "rust-analyzer": { - "initialization_options": { - .... +{ + "lsp": { + "rust-analyzer": { + "initialization_options": { + "inlayHints": { + "maxLength": null, + "lifetimeElisionHints": { + "enable": "skip_trivial" + "useParameterNames": true, + }, + "closureReturnTypeHints": { + "enable": "always" + } + } + } } } } ``` -to override these settings. - See [Inlay Hints](https://rust-analyzer.github.io/manual.html#inlay-hints) in the Rust Analyzer Manual for more information. ## Target directory @@ -70,35 +62,39 @@ A `true` setting will set the target directory to `target/rust-analyzer`. You ca You can configure which `rust-analyzer` binary Zed should use. -To use a binary in a custom location, add the following to your `settings.json`: +By default, Zed will try to find a `rust-analyzer` in your `$PATH` and try to use that. If that binary successfully executes `rust-analyzer --help`, it's used. Otherwise, Zed will fall back to installing its own `rust-analyzer` version and using that. 
+ +If you want to disable Zed looking for a `rust-analyzer` binary, you can set `path_lookup` to `false` in your `settings.json`: ```json { "lsp": { "rust-analyzer": { "binary": { - "path": "/Users/example/bin/rust-analyzer", - "args": [] + "path_lookup": false } } } } ``` -To use a binary that is on your `$PATH`, add the following to your `settings.json`: +If you want to use a binary in a custom location, you can specify a `path` and optional `args`: ```json { "lsp": { "rust-analyzer": { "binary": { - "path_lookup": true + "path": "/Users/example/bin/rust-analyzer", + "args": [] } } } } ``` +This `"path"` has to be an absolute path. + ## More server configuration If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/bump_nightly_tag.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/bump_nightly_tag.yml b/.github/workflows/bump_nightly_tag.yml index 54a3970a1cd83d..0959ae9677142c 100644 --- a/.github/workflows/bump_nightly_tag.yml +++ b/.github/workflows/bump_nightly_tag.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 with: fetch-depth: 0 From 97f5fcf8e6a42c07d0b12982030b701246ac3d65 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 15:18:29 -0400 Subject: [PATCH 192/762] Fix nightly linux x86 build (#18029) Makes our nightly script for Linux x86 (broken) match the steps for Linux ARM (working). 
--- .github/workflows/release_nightly.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 450c63b82f9cbd..17db66a264bdc0 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -113,6 +113,12 @@ jobs: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install Linux dependencies + run: ./script/linux + + - name: Limit target directory size + run: script/clear-target-dir-if-larger-than 100 + - name: Set release channel to nightly run: | set -euo pipefail From 9016de5d6350e0a9bbf6a51076c04acd9b7fba96 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 15:56:40 -0400 Subject: [PATCH 193/762] Update Rust crate anyhow to v1.0.89 (#18031) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [anyhow](https://redirect.github.com/dtolnay/anyhow) | workspace.dependencies | patch | `1.0.86` -> `1.0.89` | --- ### Release Notes
dtolnay/anyhow (anyhow) ### [`v1.0.89`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.89) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.88...1.0.89) - Make anyhow::Error's `UnwindSafe` and `RefUnwindSafe` impl consistently available between versions of Rust newer and older than 1.72 ([#​386](https://redirect.github.com/dtolnay/anyhow/issues/386)) ### [`v1.0.88`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.88) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.87...1.0.88) - Documentation improvements ### [`v1.0.87`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.87) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.86...1.0.87) - Support more APIs, including `Error::new` and `Error::chain`, in no-std mode on Rust 1.81+ ([#​383](https://redirect.github.com/dtolnay/anyhow/issues/383))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 930415440ba0ad..0640aff19c26a8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -263,9 +263,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4" [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" [[package]] name = "approx" From 2c8a6ee7cc18cb8b3e29fa4c7efa74dde8458f4f Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 18 Sep 2024 23:29:34 +0200 Subject: [PATCH 194/762] remote_server: Remove dependency on libssl and libcrypto (#15446) Fixes: #15599 Release Notes: - N/A --------- Co-authored-by: Mikayla Co-authored-by: Conrad --- Cargo.lock | 176 ++++++++++++++--- Cargo.toml | 7 + crates/auto_update/Cargo.toml | 1 - crates/auto_update/src/auto_update.rs | 3 +- crates/client/Cargo.toml | 4 +- crates/client/src/client.rs | 42 ++-- crates/collab/Cargo.toml | 1 + crates/collab/src/llm.rs | 3 +- crates/collab/src/rpc.rs | 8 +- crates/evals/Cargo.toml | 1 + crates/evals/src/eval.rs | 22 ++- crates/extension/Cargo.toml | 1 + crates/extension/src/extension_builder.rs | 2 + 
crates/extension/src/extension_store.rs | 13 +- crates/extension/src/extension_store_test.rs | 24 ++- crates/extension_cli/Cargo.toml | 2 +- crates/extension_cli/src/main.rs | 10 +- crates/git_hosting_providers/Cargo.toml | 1 - .../src/providers/codeberg.rs | 10 +- .../src/providers/github.rs | 10 +- crates/gpui/Cargo.toml | 6 +- crates/gpui/examples/image/image.rs | 1 + crates/gpui/src/app.rs | 31 ++- crates/gpui/src/elements/img.rs | 7 +- crates/gpui/src/gpui.rs | 1 + crates/http_client/Cargo.toml | 5 +- crates/http_client/src/async_body.rs | 109 +++++++++++ crates/http_client/src/github.rs | 5 +- crates/http_client/src/http_client.rs | 179 +++++++++--------- crates/isahc_http_client/Cargo.toml | 22 +++ crates/isahc_http_client/LICENSE-APACHE | 1 + .../src/isahc_http_client.rs | 93 +++++++++ crates/ollama/Cargo.toml | 1 - crates/ollama/src/ollama.rs | 31 ++- crates/project/src/lsp_store.rs | 12 +- crates/semantic_index/Cargo.toml | 1 + crates/semantic_index/examples/index.rs | 7 +- crates/semantic_index/src/embedding/ollama.rs | 2 +- crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 31 ++- script/bundle-linux | 9 +- 41 files changed, 670 insertions(+), 226 deletions(-) create mode 100644 crates/http_client/src/async_body.rs create mode 100644 crates/isahc_http_client/Cargo.toml create mode 120000 crates/isahc_http_client/LICENSE-APACHE create mode 100644 crates/isahc_http_client/src/isahc_http_client.rs diff --git a/Cargo.lock b/Cargo.lock index 0640aff19c26a8..652c584fd53795 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -876,6 +876,20 @@ version = "4.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" +[[package]] +name = "async-tls" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfeefd0ca297cbbb3bd34fd6b228401c2a5177038257afd751bc29f0a2da4795" +dependencies = [ + "futures-core", + "futures-io", + "rustls 0.20.9", + 
"rustls-pemfile 1.0.4", + "webpki", + "webpki-roots 0.22.6", +] + [[package]] name = "async-trait" version = "0.1.81" @@ -893,8 +907,8 @@ version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc" dependencies = [ - "async-native-tls", "async-std", + "async-tls", "futures-io", "futures-util", "log", @@ -981,7 +995,6 @@ dependencies = [ "editor", "gpui", "http_client", - "isahc", "log", "markdown_preview", "menu", @@ -1049,7 +1062,7 @@ dependencies = [ "fastrand 2.1.1", "hex", "http 0.2.12", - "ring", + "ring 0.17.8", "time", "tokio", "tracing", @@ -1218,7 +1231,7 @@ dependencies = [ "once_cell", "p256", "percent-encoding", - "ring", + "ring 0.17.8", "sha2", "subtle", "time", @@ -1331,7 +1344,7 @@ dependencies = [ "once_cell", "pin-project-lite", "pin-utils", - "rustls", + "rustls 0.21.12", "tokio", "tracing", ] @@ -2405,6 +2418,8 @@ dependencies = [ "rand 0.8.5", "release_channel", "rpc", + "rustls 0.20.9", + "rustls-native-certs 0.8.0", "schemars", "serde", "serde_json", @@ -2553,6 +2568,7 @@ dependencies = [ "http_client", "hyper", "indoc", + "isahc_http_client", "jsonwebtoken", "language", "language_model", @@ -4015,6 +4031,7 @@ dependencies = [ "git", "gpui", "http_client", + "isahc_http_client", "language", "languages", "node_runtime", @@ -4110,6 +4127,7 @@ dependencies = [ "http_client", "indexed_docs", "isahc", + "isahc_http_client", "language", "log", "lsp", @@ -4148,7 +4166,7 @@ dependencies = [ "env_logger", "extension", "fs", - "http_client", + "isahc_http_client", "language", "log", "rpc", @@ -4395,7 +4413,7 @@ dependencies = [ "futures-core", "futures-sink", "nanorand", - "spin", + "spin 0.9.8", ] [[package]] @@ -4904,7 +4922,6 @@ dependencies = [ "git", "gpui", "http_client", - "isahc", "pretty_assertions", "regex", "serde", @@ -5537,12 +5554,11 @@ dependencies = [ "anyhow", "derive_more", "futures 0.3.30", - "futures-lite 1.13.0", - "http 1.1.0", - 
"isahc", + "http 0.2.12", "log", "serde", "serde_json", + "smol", "url", ] @@ -5604,8 +5620,8 @@ dependencies = [ "http 0.2.12", "hyper", "log", - "rustls", - "rustls-native-certs", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", "tokio", "tokio-rustls", ] @@ -6017,6 +6033,17 @@ dependencies = [ "waker-fn", ] +[[package]] +name = "isahc_http_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.30", + "http_client", + "isahc", + "util", +] + [[package]] name = "itertools" version = "0.10.5" @@ -6121,7 +6148,7 @@ dependencies = [ "base64 0.21.7", "js-sys", "pem", - "ring", + "ring 0.17.8", "serde", "serde_json", "simple_asn1", @@ -6372,7 +6399,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin", + "spin 0.9.8", ] [[package]] @@ -7483,7 +7510,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -9175,7 +9201,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", @@ -9239,6 +9265,21 @@ dependencies = [ "util", ] +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + [[package]] name = "ring" version = "0.17.8" @@ -9249,8 +9290,8 @@ dependencies = [ "cfg-if", "getrandom 0.2.15", "libc", - "spin", - "untrusted", + "spin 0.9.8", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -9406,7 +9447,7 @@ dependencies = [ "futures 0.3.30", "glob", "rand 0.8.5", - "ring", + "ring 0.17.8", "serde", "serde_json", "shellexpand 3.1.0", @@ -9527,6 +9568,18 @@ dependencies = [ "rustix 0.38.35", ] +[[package]] 
+name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + [[package]] name = "rustls" version = "0.21.12" @@ -9534,7 +9587,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring", + "ring 0.17.8", "rustls-webpki", "sct", ] @@ -9546,7 +9599,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", - "rustls-pemfile", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.1.3", + "rustls-pki-types", "schannel", "security-framework", ] @@ -9560,14 +9626,30 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +dependencies = [ + "base64 0.22.1", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" + [[package]] name = "rustls-webpki" version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -9681,8 
+9763,8 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -9878,6 +9960,7 @@ dependencies = [ "gpui", "heed", "http_client", + "isahc_http_client", "language", "language_model", "languages", @@ -10437,6 +10520,12 @@ dependencies = [ "smallvec", ] +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "spin" version = "0.9.8" @@ -10559,8 +10648,8 @@ dependencies = [ "paste", "percent-encoding", "rust_decimal", - "rustls", - "rustls-pemfile", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "serde", "serde_json", "sha2", @@ -10573,7 +10662,7 @@ dependencies = [ "tracing", "url", "uuid", - "webpki-roots", + "webpki-roots 0.25.4", ] [[package]] @@ -11705,7 +11794,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.12", "tokio", ] @@ -12232,7 +12321,6 @@ dependencies = [ "http 0.2.12", "httparse", "log", - "native-tls", "rand 0.8.5", "sha1", "thiserror", @@ -12417,6 +12505,12 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "untrusted" version = "0.9.0" @@ -13271,6 +13365,25 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + [[package]] name = "webpki-roots" version = "0.25.4" @@ -14305,6 +14418,7 @@ dependencies = [ "inline_completion_button", "install_cli", "isahc", + "isahc_http_client", "journal", "language", "language_model", diff --git a/Cargo.toml b/Cargo.toml index ec3138179b1c5a..2071fdcb6ff484 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,6 +52,7 @@ members = [ "crates/indexed_docs", "crates/inline_completion_button", "crates/install_cli", + "crates/isahc_http_client", "crates/journal", "crates/language", "crates/language_model", @@ -173,6 +174,9 @@ members = [ default-members = ["crates/zed"] [workspace.dependencies] + + + # # Workspace member crates # @@ -212,6 +216,7 @@ file_icons = { path = "crates/file_icons" } fs = { path = "crates/fs" } fsevent = { path = "crates/fsevent" } fuzzy = { path = "crates/fuzzy" } +isahc_http_client = { path = "crates/isahc_http_client" } git = { path = "crates/git" } git_hosting_providers = { path = "crates/git_hosting_providers" } go_to_line = { path = "crates/go_to_line" } @@ -394,6 +399,8 @@ runtimelib = { version = "0.15", default-features = false, features = [ ] } rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } +rustls = "0.20.3" +rustls-native-certs = "0.8.0" schemars = { version = "0.8", features = ["impl_json_schema"] } semver = "1.0" serde = { version = "1.0", features = ["derive", "rc"] } diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml index 12e669780d83d7..1e08c9a768b0d5 100644 --- a/crates/auto_update/Cargo.toml +++ b/crates/auto_update/Cargo.toml @@ -19,7 +19,6 @@ db.workspace = true 
editor.workspace = true gpui.workspace = true http_client.workspace = true -isahc.workspace = true log.workspace = true markdown_preview.workspace = true menu.workspace = true diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index c0863e41d1aa9c..cfda6d6e584b91 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -9,7 +9,6 @@ use gpui::{ actions, AppContext, AsyncAppContext, Context as _, Global, Model, ModelContext, SemanticVersion, SharedString, Task, View, ViewContext, VisualContext, WindowContext, }; -use isahc::AsyncBody; use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView}; use schemars::JsonSchema; @@ -20,7 +19,7 @@ use smol::{fs, io::AsyncReadExt}; use settings::{Settings, SettingsSources, SettingsStore}; use smol::{fs::File, process::Command}; -use http_client::{HttpClient, HttpClientWithUrl}; +use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use std::{ env::{ diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 82237ebaa5abbd..8ae4f15c9796b1 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -18,7 +18,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup [dependencies] anyhow.workspace = true async-recursion = "0.3" -async-tungstenite = { workspace = true, features = ["async-std", "async-native-tls"] } +async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] } chrono = { workspace = true, features = ["serde"] } clock.workspace = true collections.workspace = true @@ -35,6 +35,8 @@ postage.workspace = true rand.workspace = true release_channel.workspace = true rpc = { workspace = true, features = ["gpui"] } +rustls.workspace = true +rustls-native-certs.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git 
a/crates/client/src/client.rs b/crates/client/src/client.rs index 6e1362c43e16fb..09286300d9d38f 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -240,8 +240,6 @@ pub enum EstablishConnectionError { #[error("{0}")] Other(#[from] anyhow::Error), #[error("{0}")] - Http(#[from] http_client::Error), - #[error("{0}")] InvalidHeaderValue(#[from] async_tungstenite::tungstenite::http::header::InvalidHeaderValue), #[error("{0}")] Io(#[from] std::io::Error), @@ -529,19 +527,13 @@ impl Client { } pub fn production(cx: &mut AppContext) -> Arc { - let user_agent = format!( - "Zed/{} ({}; {})", - AppVersion::global(cx), - std::env::consts::OS, - std::env::consts::ARCH - ); let clock = Arc::new(clock::RealSystemClock); - let http = Arc::new(HttpClientWithUrl::new( + let http = Arc::new(HttpClientWithUrl::new_uri( + cx.http_client(), &ClientSettings::get_global(cx).server_url, - Some(user_agent), - ProxySettings::get_global(cx).proxy.clone(), + cx.http_client().proxy().cloned(), )); - Self::new(clock, http.clone(), cx) + Self::new(clock, http, cx) } pub fn id(&self) -> u64 { @@ -1145,8 +1137,32 @@ impl Client { match url_scheme { Https => { + let client_config = { + let mut root_store = rustls::RootCertStore::empty(); + + let root_certs = rustls_native_certs::load_native_certs(); + for error in root_certs.errors { + log::warn!("error loading native certs: {:?}", error); + } + root_store.add_parsable_certificates( + &root_certs + .certs + .into_iter() + .map(|cert| cert.as_ref().to_owned()) + .collect::>(), + ); + rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(root_store) + .with_no_client_auth() + }; let (stream, _) = - async_tungstenite::async_std::client_async_tls(request, stream).await?; + async_tungstenite::async_tls::client_async_tls_with_connector( + request, + stream, + Some(client_config.into()), + ) + .await?; Ok(Connection::new( stream .map_err(|error| anyhow!(error)) diff --git a/crates/collab/Cargo.toml 
b/crates/collab/Cargo.toml index f8ba847ab2b9c6..296809158dd14e 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -36,6 +36,7 @@ envy = "0.4.2" futures.workspace = true google_ai.workspace = true hex.workspace = true +isahc_http_client.workspace = true http_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index def4499ae41bac..53f0bfdfd0130a 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -22,7 +22,7 @@ use chrono::{DateTime, Duration, Utc}; use collections::HashMap; use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase}; use futures::{Stream, StreamExt as _}; -use http_client::IsahcHttpClient; +use isahc_http_client::IsahcHttpClient; use rpc::ListModelsResponse; use rpc::{ proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME, @@ -72,6 +72,7 @@ impl LlmState { let http_client = IsahcHttpClient::builder() .default_header("User-Agent", user_agent) .build() + .map(IsahcHttpClient::from) .context("failed to construct http client")?; let this = Self { diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 4146eafb870a44..b2a694027a6f8b 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -35,6 +35,8 @@ use chrono::Utc; use collections::{HashMap, HashSet}; pub use connection_pool::{ConnectionPool, ZedVersion}; use core::fmt::{self, Debug, Formatter}; +use http_client::HttpClient; +use isahc_http_client::IsahcHttpClient; use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL}; use sha2::Digest; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; @@ -45,7 +47,6 @@ use futures::{ stream::FuturesUnordered, FutureExt, SinkExt, StreamExt, TryStreamExt, }; -use http_client::IsahcHttpClient; use prometheus::{register_int_gauge, IntGauge}; use rpc::{ proto::{ @@ -139,7 +140,7 @@ struct Session { connection_pool: Arc>, app_state: Arc, 
supermaven_client: Option>, - http_client: Arc, + http_client: Arc, /// The GeoIP country code for the user. #[allow(unused)] geoip_country_code: Option, @@ -955,9 +956,10 @@ impl Server { tracing::info!("connection opened"); + let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { - Ok(http_client) => Arc::new(http_client), + Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)), Err(error) => { tracing::error!(?error, "failed to create HTTP client"); return; diff --git a/crates/evals/Cargo.toml b/crates/evals/Cargo.toml index e680e4f5046236..400ab139aa2e40 100644 --- a/crates/evals/Cargo.toml +++ b/crates/evals/Cargo.toml @@ -24,6 +24,7 @@ feature_flags.workspace = true fs.workspace = true git.workspace = true gpui.workspace = true +isahc_http_client.workspace = true language.workspace = true languages.workspace = true http_client.workspace = true diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index d7e63fafbf8202..751dcd09aa1f5c 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -97,13 +97,14 @@ fn main() -> Result<()> { gpui::App::headless().run(move |cx| { let executor = cx.background_executor().clone(); - + let client = isahc_http_client::IsahcHttpClient::new(None, None); + cx.set_http_client(client.clone()); match cli.command { Commands::Fetch {} => { executor .clone() .spawn(async move { - if let Err(err) = fetch_evaluation_resources(&executor).await { + if let Err(err) = fetch_evaluation_resources(client, &executor).await { eprintln!("Error: {}", err); exit(1); } @@ -127,10 +128,12 @@ fn main() -> Result<()> { Ok(()) } -async fn fetch_evaluation_resources(executor: &BackgroundExecutor) -> Result<()> { - let http_client = http_client::HttpClientWithProxy::new(None, None); - fetch_code_search_net_resources(&http_client).await?; - fetch_eval_repos(executor, &http_client).await?; +async fn 
fetch_evaluation_resources( + http_client: Arc, + executor: &BackgroundExecutor, +) -> Result<()> { + fetch_code_search_net_resources(&*http_client).await?; + fetch_eval_repos(executor, &*http_client).await?; Ok(()) } @@ -239,6 +242,7 @@ async fn run_evaluation( executor: &BackgroundExecutor, cx: &mut AsyncAppContext, ) -> Result<()> { + let mut http_client = None; cx.update(|cx| { let mut store = SettingsStore::new(cx); store @@ -248,15 +252,15 @@ async fn run_evaluation( client::init_settings(cx); language::init(cx); Project::init_settings(cx); + http_client = Some(cx.http_client()); cx.update_flags(false, vec![]); }) .unwrap(); - + let http_client = http_client.unwrap(); let dataset_dir = Path::new(CODESEARCH_NET_DIR); let evaluations_path = dataset_dir.join("evaluations.json"); let repos_dir = Path::new(EVAL_REPOS_DIR); let db_path = Path::new(EVAL_DB_PATH); - let http_client = http_client::HttpClientWithProxy::new(None, None); let api_key = std::env::var("OPENAI_API_KEY").unwrap(); let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new()); let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc; @@ -266,9 +270,9 @@ async fn run_evaluation( Client::new( clock, Arc::new(http_client::HttpClientWithUrl::new( + http_client.clone(), "https://zed.dev", None, - None, )), cx, ) diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index 0371b1866dfd9a..edf6184d38475d 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -57,6 +57,7 @@ task.workspace = true serde_json_lenient.workspace = true [dev-dependencies] +isahc_http_client.workspace = true ctor.workspace = true env_logger.workspace = true parking_lot.workspace = true diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index e42929f78ebb1a..7380e699f9e715 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -246,6 +246,7 @@ impl 
ExtensionBuilder { .args(scanner_path.exists().then_some(scanner_path)) .output() .context("failed to run clang")?; + if !clang_output.status.success() { bail!( "failed to compile {} parser with clang: {}", @@ -431,6 +432,7 @@ impl ExtensionBuilder { let body = BufReader::new(response.body_mut()); let body = GzipDecoder::new(body); let tar = Archive::new(body); + tar.unpack(&tar_out_dir) .await .context("failed to unpack wasi-sdk archive")?; diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 3ebc4f20d36975..bd416f4029f496 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -190,6 +190,7 @@ pub fn init( None, fs, client.http_client().clone(), + client.http_client().clone(), Some(client.telemetry().clone()), node_runtime, language_registry, @@ -225,6 +226,7 @@ impl ExtensionStore { build_dir: Option, fs: Arc, http_client: Arc, + builder_client: Arc, telemetry: Option>, node_runtime: Arc, language_registry: Arc, @@ -244,12 +246,7 @@ impl ExtensionStore { extension_index: Default::default(), installed_dir, index_path, - builder: Arc::new(ExtensionBuilder::new( - // Construct a real HTTP client for the extension builder, as we - // don't want to use a fake one in the tests. 
- ::http_client::client(None, http_client.proxy().cloned()), - build_dir, - )), + builder: Arc::new(ExtensionBuilder::new(builder_client, build_dir)), outstanding_operations: Default::default(), modified_extensions: Default::default(), reload_complete_senders: Vec::new(), @@ -830,7 +827,6 @@ impl ExtensionStore { let mut extension_manifest = ExtensionManifest::load(fs.clone(), &extension_source_path).await?; let extension_id = extension_manifest.id.clone(); - if !this.update(&mut cx, |this, cx| { match this.outstanding_operations.entry(extension_id.clone()) { btree_map::Entry::Occupied(_) => return false, @@ -854,7 +850,6 @@ impl ExtensionStore { .ok(); } }); - cx.background_executor() .spawn({ let extension_source_path = extension_source_path.clone(); @@ -885,10 +880,8 @@ impl ExtensionStore { bail!("extension {extension_id} is already installed"); } } - fs.create_symlink(output_path, extension_source_path) .await?; - this.update(&mut cx, |this, cx| this.reload(None, cx))? .await; Ok(()) diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 326c713bd563ff..0fbd00e0b4ca8a 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -13,10 +13,12 @@ use futures::{io::BufReader, AsyncReadExt, StreamExt}; use gpui::{Context, SemanticVersion, TestAppContext}; use http_client::{FakeHttpClient, Response}; use indexed_docs::IndexedDocsRegistry; +use isahc_http_client::IsahcHttpClient; use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; use node_runtime::FakeNodeRuntime; use parking_lot::Mutex; use project::{Project, DEFAULT_COMPLETION_CONTEXT}; +use release_channel::AppVersion; use serde_json::json; use settings::{Settings as _, SettingsStore}; use snippet_provider::SnippetRegistry; @@ -270,6 +272,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { None, fs.clone(), http_client.clone(), + http_client.clone(), 
None, node_runtime.clone(), language_registry.clone(), @@ -397,6 +400,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { None, fs.clone(), http_client.clone(), + http_client.clone(), None, node_runtime.clone(), language_registry.clone(), @@ -453,6 +457,8 @@ async fn test_extension_store(cx: &mut TestAppContext) { }); } +// TODO remove +#[ignore] #[gpui::test] async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { init_test(cx); @@ -502,7 +508,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { http_request_count: 0, })); - let http_client = FakeHttpClient::create({ + let extension_client = FakeHttpClient::create({ let language_server_version = language_server_version.clone(); move |request| { let language_server_version = language_server_version.clone(); @@ -558,19 +564,33 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice())); encoder.read_to_end(&mut gzipped_bytes).await.unwrap(); Ok(Response::new(gzipped_bytes.into())) + // } else if uri == WASI_ADAPTER_URL { + // let binary_contents = + // include_bytes!("wasi_snapshot_preview1.reactor.wasm").as_slice(); + // Ok(Response::new(binary_contents.into())) } else { Ok(Response::builder().status(404).body("not found".into())?) 
} } } }); + let user_agent = cx.update(|cx| { + format!( + "Zed/{} ({}; {})", + AppVersion::global(cx), + std::env::consts::OS, + std::env::consts::ARCH + ) + }); + let builder_client = IsahcHttpClient::new(None, Some(user_agent)); let extension_store = cx.new_model(|cx| { ExtensionStore::new( extensions_dir.clone(), Some(cache_dir), fs.clone(), - http_client.clone(), + extension_client.clone(), + builder_client, None, node_runtime, language_registry.clone(), diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index 54c47f4a829af6..bc649d8e04989f 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -18,7 +18,7 @@ clap = { workspace = true, features = ["derive"] } env_logger.workspace = true extension = { workspace = true, features = ["no-webrtc"] } fs.workspace = true -http_client.workspace = true +isahc_http_client.workspace = true language.workspace = true log.workspace = true rpc.workspace = true diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index 029c560e57db81..6eaebca2f0e9bf 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -7,13 +7,13 @@ use std::{ }; use ::fs::{copy_recursive, CopyOptions, Fs, RealFs}; -use ::http_client::HttpClientWithProxy; use anyhow::{anyhow, bail, Context, Result}; use clap::Parser; use extension::{ extension_builder::{CompileExtensionOptions, ExtensionBuilder}, ExtensionManifest, }; +use isahc_http_client::IsahcHttpClient; use language::LanguageConfig; use theme::ThemeRegistry; use tree_sitter::{Language, Query, WasmStore}; @@ -66,7 +66,13 @@ async fn main() -> Result<()> { std::env::consts::OS, std::env::consts::ARCH ); - let http_client = Arc::new(HttpClientWithProxy::new(Some(user_agent), None)); + let http_client = Arc::new( + IsahcHttpClient::builder() + .default_header("User-Agent", user_agent) + .build() + .map(IsahcHttpClient::from)?, + ); + let builder = ExtensionBuilder::new(http_client, 
scratch_dir); builder .compile_extension( diff --git a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index caca91c1ab93d6..b8ad1ed05d1605 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -18,7 +18,6 @@ futures.workspace = true git.workspace = true gpui.workspace = true http_client.workspace = true -isahc.workspace = true regex.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/git_hosting_providers/src/providers/codeberg.rs b/crates/git_hosting_providers/src/providers/codeberg.rs index b34d809100be56..eaadca1ecf9618 100644 --- a/crates/git_hosting_providers/src/providers/codeberg.rs +++ b/crates/git_hosting_providers/src/providers/codeberg.rs @@ -3,9 +3,7 @@ use std::sync::Arc; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::HttpClient; -use isahc::config::Configurable; -use isahc::{AsyncBody, Request}; +use http_client::{AsyncBody, HttpClient, Request}; use serde::Deserialize; use url::Url; @@ -51,16 +49,14 @@ impl Codeberg { let url = format!("https://codeberg.org/api/v1/repos/{repo_owner}/{repo}/git/commits/{commit}"); - let mut request = Request::get(&url) - .redirect_policy(isahc::config::RedirectPolicy::Follow) - .header("Content-Type", "application/json"); + let mut request = Request::get(&url).header("Content-Type", "application/json"); if let Ok(codeberg_token) = std::env::var("CODEBERG_TOKEN") { request = request.header("Authorization", format!("Bearer {}", codeberg_token)); } let mut response = client - .send(request.body(AsyncBody::default())?) 
+ .send_with_redirect_policy(request.body(AsyncBody::default())?, true) .await .with_context(|| format!("error fetching Codeberg commit details at {:?}", url))?; diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 103f6ae1ce734c..be46b51ddf7bdf 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -3,9 +3,7 @@ use std::sync::{Arc, OnceLock}; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::HttpClient; -use isahc::config::Configurable; -use isahc::{AsyncBody, Request}; +use http_client::{AsyncBody, HttpClient, Request}; use regex::Regex; use serde::Deserialize; use url::Url; @@ -55,16 +53,14 @@ impl Github { ) -> Result> { let url = format!("https://api.github.com/repos/{repo_owner}/{repo}/commits/{commit}"); - let mut request = Request::get(&url) - .redirect_policy(isahc::config::RedirectPolicy::Follow) - .header("Content-Type", "application/json"); + let mut request = Request::get(&url).header("Content-Type", "application/json"); if let Ok(github_token) = std::env::var("GITHUB_TOKEN") { request = request.header("Authorization", format!("Bearer {}", github_token)); } let mut response = client - .send(request.body(AsyncBody::default())?) 
+ .send_with_redirect_policy(request.body(AsyncBody::default())?, true) .await .with_context(|| format!("error fetching GitHub commit details at {:?}", url))?; diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index d0d75b73e97a7d..e2339a38edd1f4 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -11,13 +11,13 @@ license = "Apache-2.0" workspace = true [features] -default = [] +default = ["http_client"] test-support = [ "backtrace", "collections/test-support", "rand", "util/test-support", - "http_client/test-support", + "http_client?/test-support", ] runtime_shaders = [] macos-blade = ["blade-graphics", "blade-macros", "blade-util", "bytemuck"] @@ -40,7 +40,7 @@ derive_more.workspace = true etagere = "0.2" futures.workspace = true gpui_macros.workspace = true -http_client.workspace = true +http_client = { optional = true, workspace = true } image = "0.25.1" itertools.workspace = true linkme = "0.3" diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index ac7af186d30ba0..157dbdf70f1afc 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -131,6 +131,7 @@ fn main() { PathBuf::from_str("crates/gpui/examples/image/app-icon.png").unwrap(), ), remote_resource: "https://picsum.photos/512/512".into(), + asset_resource: "image/color.svg".into(), }) }) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ee7a6ef191a6e2..6cb491b100810b 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -117,7 +117,7 @@ impl App { Self(AppContext::new( current_platform(false), Arc::new(()), - http_client::client(None, None), + Arc::new(NullHttpClient), )) } @@ -128,7 +128,7 @@ impl App { Self(AppContext::new( current_platform(true), Arc::new(()), - http_client::client(None, None), + Arc::new(NullHttpClient), )) } @@ -142,6 +142,14 @@ impl App { self } + /// Set the http client for the application + pub fn with_http_client(self, http_client: Arc) -> Self { + 
let mut context_lock = self.0.borrow_mut(); + context_lock.http_client = http_client; + drop(context_lock); + self + } + /// Start the application. The provided callback will be called once the /// app is fully launched. pub fn run(self, on_finish_launching: F) @@ -1512,3 +1520,22 @@ pub struct KeystrokeEvent { /// The action that was resolved for the keystroke, if any pub action: Option>, } + +struct NullHttpClient; + +impl HttpClient for NullHttpClient { + fn send_with_redirect_policy( + &self, + _req: http_client::Request, + _follow_redirects: bool, + ) -> futures::future::BoxFuture< + 'static, + Result, anyhow::Error>, + > { + async move { Err(anyhow!("No HttpClient available")) }.boxed() + } + + fn proxy(&self) -> Option<&http_client::Uri> { + None + } +} diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index f1e8bb68e3237e..63236d5309f145 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -345,7 +345,10 @@ impl Asset for ImageAsset { let bytes = match source.clone() { UriOrPath::Path(uri) => fs::read(uri.as_ref())?, UriOrPath::Uri(uri) => { - let mut response = client.get(uri.as_ref(), ().into(), true).await?; + let mut response = client + .get(uri.as_ref(), ().into(), true) + .await + .map_err(|e| ImageCacheError::Client(Arc::new(e)))?; let mut body = Vec::new(); response.body_mut().read_to_end(&mut body).await?; if !response.status().is_success() { @@ -429,7 +432,7 @@ impl Asset for ImageAsset { pub enum ImageCacheError { /// An error that occurred while fetching an image from a remote source. #[error("http error: {0}")] - Client(#[from] http_client::Error), + Client(#[from] Arc), /// An error that occurred while reading the image from disk. 
#[error("IO error: {0}")] Io(Arc), diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index a447478a9be4b3..7ba3ce055ecc67 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -128,6 +128,7 @@ pub use executor::*; pub use geometry::*; pub use global::*; pub use gpui_macros::{register_action, test, IntoElement, Render}; +pub use http_client; pub use input::*; pub use interactive::*; use key_dispatch::*; diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index ae017685a9471a..0244ac41042b6f 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -16,13 +16,12 @@ path = "src/http_client.rs" doctest = true [dependencies] -http = "1.0.0" +http = "0.2" anyhow.workspace = true derive_more.workspace = true futures.workspace = true -isahc.workspace = true log.workspace = true serde.workspace = true serde_json.workspace = true -futures-lite.workspace = true +smol.workspace = true url.workspace = true diff --git a/crates/http_client/src/async_body.rs b/crates/http_client/src/async_body.rs new file mode 100644 index 00000000000000..e2544f60fe5404 --- /dev/null +++ b/crates/http_client/src/async_body.rs @@ -0,0 +1,109 @@ +use std::{borrow::Cow, io::Read, pin::Pin, task::Poll}; + +use futures::{AsyncRead, AsyncReadExt}; + +/// Based on the implementation of AsyncBody in +/// https://github.com/sagebind/isahc/blob/5c533f1ef4d6bdf1fd291b5103c22110f41d0bf0/src/body/mod.rs +pub struct AsyncBody(pub Inner); + +pub enum Inner { + /// An empty body. + Empty, + + /// A body stored in memory. + SyncReader(std::io::Cursor>), + + /// An asynchronous reader. + AsyncReader(Pin>), +} + +impl AsyncBody { + /// Create a new empty body. + /// + /// An empty body represents the *absence* of a body, which is semantically + /// different than the presence of a body of zero length. + pub fn empty() -> Self { + Self(Inner::Empty) + } + /// Create a streaming body that reads from the given reader. 
+ pub fn from_reader(read: R) -> Self + where + R: AsyncRead + Send + Sync + 'static, + { + Self(Inner::AsyncReader(Box::pin(read))) + } +} + +impl Default for AsyncBody { + fn default() -> Self { + Self(Inner::Empty) + } +} + +impl From<()> for AsyncBody { + fn from(_: ()) -> Self { + Self(Inner::Empty) + } +} + +impl From> for AsyncBody { + fn from(body: Vec) -> Self { + Self(Inner::SyncReader(std::io::Cursor::new(Cow::Owned(body)))) + } +} + +impl From<&'_ [u8]> for AsyncBody { + fn from(body: &[u8]) -> Self { + body.to_vec().into() + } +} + +impl From for AsyncBody { + fn from(body: String) -> Self { + body.into_bytes().into() + } +} + +impl From<&'_ str> for AsyncBody { + fn from(body: &str) -> Self { + body.as_bytes().into() + } +} + +impl> From> for AsyncBody { + fn from(body: Option) -> Self { + match body { + Some(body) => body.into(), + None => Self(Inner::Empty), + } + } +} + +impl std::io::Read for AsyncBody { + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + match &mut self.0 { + Inner::Empty => Ok(0), + Inner::SyncReader(cursor) => cursor.read(buf), + Inner::AsyncReader(async_reader) => smol::block_on(async_reader.read(buf)), + } + } +} + +impl futures::AsyncRead for AsyncBody { + fn poll_read( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut [u8], + ) -> std::task::Poll> { + // SAFETY: Standard Enum pin projection + let inner = unsafe { &mut self.get_unchecked_mut().0 }; + match inner { + Inner::Empty => Poll::Ready(Ok(0)), + // Blocking call is over an in-memory buffer + Inner::SyncReader(cursor) => Poll::Ready(cursor.read(buf)), + Inner::AsyncReader(async_reader) => { + AsyncRead::poll_read(async_reader.as_mut(), cx, buf) + } + } + } +} diff --git a/crates/http_client/src/github.rs b/crates/http_client/src/github.rs index a64a5bae5c681c..70587fa3cea04b 100644 --- a/crates/http_client/src/github.rs +++ b/crates/http_client/src/github.rs @@ -34,7 +34,7 @@ pub async fn latest_github_release( ) -> Result { let mut 
response = http .get( - &format!("https://api.github.com/repos/{repo_name_with_owner}/releases"), + format!("https://api.github.com/repos/{repo_name_with_owner}/releases").as_str(), Default::default(), true, ) @@ -91,13 +91,14 @@ pub async fn get_release_by_tag_name( .context("error fetching latest release")?; let mut body = Vec::new(); + let status = response.status(); response .body_mut() .read_to_end(&mut body) .await .context("error reading latest release")?; - if response.status().is_client_error() { + if status.is_client_error() { let text = String::from_utf8_lossy(body.as_slice()); bail!( "status error {}, response: {text:?}", diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 7ea0029d79029f..d78b2dd23c7f7f 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -1,47 +1,48 @@ +mod async_body; pub mod github; pub use anyhow::{anyhow, Result}; +pub use async_body::{AsyncBody, Inner}; use derive_more::Deref; +pub use http::{self, Method, Request, Response, StatusCode, Uri}; + use futures::future::BoxFuture; -use futures_lite::FutureExt; -use isahc::config::{Configurable, RedirectPolicy}; -pub use isahc::http; -pub use isahc::{ - http::{Method, StatusCode, Uri}, - AsyncBody, Error, HttpClient as IsahcHttpClient, Request, Response, -}; +use http::request::Builder; #[cfg(feature = "test-support")] use std::fmt; -use std::{ - sync::{Arc, Mutex}, - time::Duration, -}; +use std::sync::{Arc, Mutex}; pub use url::Url; -pub trait HttpClient: Send + Sync { +pub trait HttpClient: 'static + Send + Sync { fn send( + &self, + req: http::Request, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.send_with_redirect_policy(req, false) + } + + // TODO: Make a better API for this + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>>; + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>>; fn get<'a>( &'a self, uri: &str, 
body: AsyncBody, follow_redirects: bool, - ) -> BoxFuture<'a, Result, Error>> { - let request = isahc::Request::builder() - .redirect_policy(if follow_redirects { - RedirectPolicy::Follow - } else { - RedirectPolicy::None - }) - .method(Method::GET) - .uri(uri) - .body(body); + ) -> BoxFuture<'a, Result, anyhow::Error>> { + let request = Builder::new().uri(uri).body(body); + match request { - Ok(request) => self.send(request), - Err(error) => async move { Err(error.into()) }.boxed(), + Ok(request) => Box::pin(async move { + self.send_with_redirect_policy(request, follow_redirects) + .await + .map_err(Into::into) + }), + Err(e) => Box::pin(async move { Err(e.into()) }), } } @@ -49,15 +50,16 @@ pub trait HttpClient: Send + Sync { &'a self, uri: &str, body: AsyncBody, - ) -> BoxFuture<'a, Result, Error>> { - let request = isahc::Request::builder() - .method(Method::POST) + ) -> BoxFuture<'a, Result, anyhow::Error>> { + let request = Builder::new() .uri(uri) + .method(Method::POST) .header("Content-Type", "application/json") .body(body); + match request { - Ok(request) => self.send(request), - Err(error) => async move { Err(error.into()) }.boxed(), + Ok(request) => Box::pin(async move { self.send(request).await.map_err(Into::into) }), + Err(e) => Box::pin(async move { Err(e.into()) }), } } @@ -74,29 +76,28 @@ pub struct HttpClientWithProxy { impl HttpClientWithProxy { /// Returns a new [`HttpClientWithProxy`] with the given proxy URL. 
- pub fn new(user_agent: Option, proxy_url: Option) -> Self { - let proxy_url = proxy_url - .and_then(|input| { - input - .parse::() - .inspect_err(|e| log::error!("Error parsing proxy settings: {}", e)) - .ok() - }) + pub fn new(client: Arc, proxy_url: Option) -> Self { + let proxy_uri = proxy_url + .and_then(|proxy| proxy.parse().ok()) .or_else(read_proxy_from_env); + Self::new_uri(client, proxy_uri) + } + pub fn new_uri(client: Arc, proxy_uri: Option) -> Self { Self { - client: client(user_agent, proxy_url.clone()), - proxy: proxy_url, + client, + proxy: proxy_uri, } } } impl HttpClient for HttpClientWithProxy { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -105,11 +106,12 @@ impl HttpClient for HttpClientWithProxy { } impl HttpClient for Arc { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -123,14 +125,35 @@ pub struct HttpClientWithUrl { client: HttpClientWithProxy, } +impl std::ops::Deref for HttpClientWithUrl { + type Target = HttpClientWithProxy; + + fn deref(&self) -> &Self::Target { + &self.client + } +} + impl HttpClientWithUrl { /// Returns a new [`HttpClientWithUrl`] with the given base URL. 
pub fn new( + client: Arc, base_url: impl Into, - user_agent: Option, proxy_url: Option, ) -> Self { - let client = HttpClientWithProxy::new(user_agent, proxy_url); + let client = HttpClientWithProxy::new(client, proxy_url); + + Self { + base_url: Mutex::new(base_url.into()), + client, + } + } + + pub fn new_uri( + client: Arc, + base_url: impl Into, + proxy_uri: Option, + ) -> Self { + let client = HttpClientWithProxy::new_uri(client, proxy_uri); Self { base_url: Mutex::new(base_url.into()), @@ -195,11 +218,12 @@ impl HttpClientWithUrl { } impl HttpClient for Arc { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -208,11 +232,12 @@ impl HttpClient for Arc { } impl HttpClient for HttpClientWithUrl { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -220,26 +245,7 @@ impl HttpClient for HttpClientWithUrl { } } -pub fn client(user_agent: Option, proxy: Option) -> Arc { - let mut builder = isahc::HttpClient::builder() - // Some requests to Qwen2 models on Runpod can take 32+ seconds, - // especially if there's a cold boot involved. We may need to have - // those requests use a different http client, because global timeouts - // of 50 and 60 seconds, respectively, would be very high! 
- .connect_timeout(Duration::from_secs(5)) - .low_speed_timeout(100, Duration::from_secs(30)) - .proxy(proxy.clone()); - if let Some(user_agent) = user_agent { - builder = builder.default_header("User-Agent", user_agent); - } - - Arc::new(HttpClientWithProxy { - client: Arc::new(builder.build().unwrap()), - proxy, - }) -} - -fn read_proxy_from_env() -> Option { +pub fn read_proxy_from_env() -> Option { const ENV_VARS: &[&str] = &[ "ALL_PROXY", "all_proxy", @@ -258,23 +264,9 @@ fn read_proxy_from_env() -> Option { None } -impl HttpClient for isahc::HttpClient { - fn send( - &self, - req: Request, - ) -> BoxFuture<'static, Result, Error>> { - let client = self.clone(); - Box::pin(async move { client.send_async(req).await }) - } - - fn proxy(&self) -> Option<&Uri> { - None - } -} - #[cfg(feature = "test-support")] type FakeHttpHandler = Box< - dyn Fn(Request) -> BoxFuture<'static, Result, Error>> + dyn Fn(Request) -> BoxFuture<'static, Result, anyhow::Error>> + Send + Sync + 'static, @@ -289,7 +281,7 @@ pub struct FakeHttpClient { impl FakeHttpClient { pub fn create(handler: F) -> Arc where - Fut: futures::Future, Error>> + Send + 'static, + Fut: futures::Future, anyhow::Error>> + Send + 'static, F: Fn(Request) -> Fut + Send + Sync + 'static, { Arc::new(HttpClientWithUrl { @@ -331,12 +323,13 @@ impl fmt::Debug for FakeHttpClient { #[cfg(feature = "test-support")] impl HttpClient for FakeHttpClient { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { + _follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { let future = (self.handler)(req); - Box::pin(async move { future.await.map(Into::into) }) + future } fn proxy(&self) -> Option<&Uri> { diff --git a/crates/isahc_http_client/Cargo.toml b/crates/isahc_http_client/Cargo.toml new file mode 100644 index 00000000000000..b90163ef7495d5 --- /dev/null +++ b/crates/isahc_http_client/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "isahc_http_client" 
+version = "0.1.0" +edition = "2021" +publish = false +license = "Apache-2.0" + +[lints] +workspace = true + +[features] +test-support = [] + +[lib] +path = "src/isahc_http_client.rs" + +[dependencies] +http_client.workspace = true +isahc.workspace = true +futures.workspace = true +anyhow.workspace = true +util.workspace = true diff --git a/crates/isahc_http_client/LICENSE-APACHE b/crates/isahc_http_client/LICENSE-APACHE new file mode 120000 index 00000000000000..1cd601d0a3affa --- /dev/null +++ b/crates/isahc_http_client/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/isahc_http_client/src/isahc_http_client.rs b/crates/isahc_http_client/src/isahc_http_client.rs new file mode 100644 index 00000000000000..6c40b9f53b3f8e --- /dev/null +++ b/crates/isahc_http_client/src/isahc_http_client.rs @@ -0,0 +1,93 @@ +use std::{mem, sync::Arc, time::Duration}; + +use futures::future::BoxFuture; +use isahc::config::RedirectPolicy; +use util::maybe; + +pub use isahc::config::Configurable; +pub struct IsahcHttpClient(isahc::HttpClient); + +pub use http_client::*; + +impl IsahcHttpClient { + pub fn new(proxy: Option, user_agent: Option) -> Arc { + let mut builder = isahc::HttpClient::builder() + .connect_timeout(Duration::from_secs(5)) + .low_speed_timeout(100, Duration::from_secs(5)) + .proxy(proxy.clone()); + if let Some(agent) = user_agent { + builder = builder.default_header("User-Agent", agent); + } + Arc::new(IsahcHttpClient(builder.build().unwrap())) + } + pub fn builder() -> isahc::HttpClientBuilder { + isahc::HttpClientBuilder::new() + } +} + +impl From for IsahcHttpClient { + fn from(client: isahc::HttpClient) -> Self { + Self(client) + } +} + +impl HttpClient for IsahcHttpClient { + fn proxy(&self) -> Option<&Uri> { + None + } + + fn send_with_redirect_policy( + &self, + req: http_client::http::Request, + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> + { + let req = maybe!({ + let (mut parts, 
body) = req.into_parts(); + let mut builder = isahc::Request::builder() + .method(parts.method) + .uri(parts.uri) + .version(parts.version); + + let headers = builder.headers_mut()?; + mem::swap(headers, &mut parts.headers); + + let extensions = builder.extensions_mut()?; + mem::swap(extensions, &mut parts.extensions); + + let isahc_body = match body.0 { + http_client::Inner::Empty => isahc::AsyncBody::empty(), + http_client::Inner::AsyncReader(reader) => isahc::AsyncBody::from_reader(reader), + http_client::Inner::SyncReader(reader) => { + isahc::AsyncBody::from_bytes_static(reader.into_inner()) + } + }; + + builder + .redirect_policy(if follow_redirects { + RedirectPolicy::Follow + } else { + RedirectPolicy::None + }) + .body(isahc_body) + .ok() + }); + + let client = self.0.clone(); + + Box::pin(async move { + match req { + Some(req) => client + .send_async(req) + .await + .map_err(Into::into) + .map(|response| { + let (parts, body) = response.into_parts(); + let body = http_client::AsyncBody::from_reader(body); + http_client::Response::from_parts(parts, body) + }), + None => Err(anyhow::anyhow!("Request was malformed")), + } + }) + } +} diff --git a/crates/ollama/Cargo.toml b/crates/ollama/Cargo.toml index 76a8b1a8c16cfc..34d8802b977df3 100644 --- a/crates/ollama/Cargo.toml +++ b/crates/ollama/Cargo.toml @@ -19,7 +19,6 @@ schemars = ["dep:schemars"] anyhow.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index a65d6eaf900312..972520e61f53e5 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -1,7 +1,6 @@ use anyhow::{anyhow, Context, Result}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use 
isahc::config::Configurable; +use http_client::{http, AsyncBody, HttpClient, Method, Request as HttpRequest}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use serde_json::{value::RawValue, Value}; @@ -262,18 +261,14 @@ pub async fn stream_chat_completion( client: &dyn HttpClient, api_url: &str, request: ChatRequest, - low_speed_timeout: Option, + _: Option, ) -> Result>> { let uri = format!("{api_url}/api/chat"); - let mut request_builder = HttpRequest::builder() + let request_builder = http::Request::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); - if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); - }; - let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; if response.status().is_success() { @@ -305,18 +300,14 @@ pub async fn stream_chat_completion( pub async fn get_models( client: &dyn HttpClient, api_url: &str, - low_speed_timeout: Option, + _: Option, ) -> Result> { let uri = format!("{api_url}/api/tags"); - let mut request_builder = HttpRequest::builder() + let request_builder = HttpRequest::builder() .method(Method::GET) .uri(uri) .header("Accept", "application/json"); - if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); - }; - let request = request_builder.body(AsyncBody::default())?; let mut response = client.send(request).await?; @@ -354,13 +345,13 @@ pub async fn preload_model(client: Arc, api_url: &str, model: &s let mut response = match client.send(request).await { Ok(response) => response, - Err(err) => { + Err(error) => { // Be ok with a timeout during preload of the model - if err.is_timeout() { - return Ok(()); - } else { - return Err(err.into()); - } + // if err.is_timeout() { + // return Ok(()); + // } else { + return Err(error); + //} } }; diff 
--git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 6dd528147be322..58d9ba8926737d 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -26,7 +26,7 @@ use gpui::{ AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel, }; -use http_client::{AsyncBody, Error, HttpClient, Request, Response, Uri}; +use http_client::{AsyncBody, HttpClient, Request, Response, Uri}; use language::{ language_settings::{ all_language_settings, language_settings, AllLanguageSettings, LanguageSettings, @@ -7339,7 +7339,7 @@ impl HttpClient for BlockedHttpClient { fn send( &self, _req: Request, - ) -> BoxFuture<'static, Result, Error>> { + ) -> BoxFuture<'static, Result, anyhow::Error>> { Box::pin(async { Err(std::io::Error::new( std::io::ErrorKind::PermissionDenied, @@ -7352,6 +7352,14 @@ impl HttpClient for BlockedHttpClient { fn proxy(&self) -> Option<&Uri> { None } + + fn send_with_redirect_policy( + &self, + req: Request, + _: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.send(req) + } } struct SshLspAdapterDelegate { diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index c8dbb6a9f53dd2..691d6e57f6d448 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -51,6 +51,7 @@ workspace.workspace = true worktree.workspace = true [dev-dependencies] +isahc_http_client.workspace = true env_logger.workspace = true client = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index 977473d1dc38ba..0cc3f9f317b08f 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -2,6 +2,7 @@ use client::Client; use futures::channel::oneshot; use gpui::App; use http_client::HttpClientWithUrl; +use 
isahc_http_client::IsahcHttpClient; use language::language_settings::AllLanguageSettings; use project::Project; use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb}; @@ -26,8 +27,12 @@ fn main() { }); let clock = Arc::new(FakeSystemClock::default()); - let http = Arc::new(HttpClientWithUrl::new("http://localhost:11434", None, None)); + let http = Arc::new(HttpClientWithUrl::new( + IsahcHttpClient::new(None, None), + "http://localhost:11434", + None, + )); let client = client::Client::new(clock, http.clone(), cx); Client::set_global(client.clone(), cx); diff --git a/crates/semantic_index/src/embedding/ollama.rs b/crates/semantic_index/src/embedding/ollama.rs index 09d33c584a9af5..6d3fa679021ee3 100644 --- a/crates/semantic_index/src/embedding/ollama.rs +++ b/crates/semantic_index/src/embedding/ollama.rs @@ -1,5 +1,5 @@ use anyhow::{Context as _, Result}; -use futures::{future::BoxFuture, AsyncReadExt, FutureExt}; +use futures::{future::BoxFuture, AsyncReadExt as _, FutureExt}; use http_client::HttpClient; use serde::{Deserialize, Serialize}; use std::sync::Arc; diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 28d2c7f82576d8..7fa9602a141073 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -47,6 +47,7 @@ file_finder.workspace = true file_icons.workspace = true fs.workspace = true futures.workspace = true +isahc_http_client.workspace = true git.workspace = true git_hosting_providers.workspace = true go_to_line.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index eb6d2853fdad7e..d3a722ec657898 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -11,7 +11,7 @@ use assistant::PromptBuilder; use chrono::Offset; use clap::{command, Parser}; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; -use client::{parse_zed_link, Client, DevServerToken, UserStore}; +use client::{parse_zed_link, Client, DevServerToken, ProxySettings, UserStore}; use collab_ui::channel_view::ChannelView; use 
db::kvp::KEY_VALUE_STORE; use editor::Editor; @@ -23,6 +23,8 @@ use gpui::{ Action, App, AppContext, AsyncAppContext, Context, DismissEvent, Global, Task, UpdateGlobal as _, VisualContext, }; +use http_client::{read_proxy_from_env, Uri}; +use isahc_http_client::IsahcHttpClient; use language::LanguageRegistry; use log::LevelFilter; @@ -327,7 +329,10 @@ fn main() { init_logger(); log::info!("========== starting zed =========="); - let app = App::new().with_assets(Assets); + + let app = App::new() + .with_assets(Assets) + .with_http_client(IsahcHttpClient::new(None, None)); let (installation_id, existing_installation_id_found) = app .background_executor() @@ -436,6 +441,26 @@ fn main() { if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") { AppCommitSha::set_global(AppCommitSha(build_sha.into()), cx); } + settings::init(cx); + client::init_settings(cx); + let user_agent = format!( + "Zed/{} ({}; {})", + AppVersion::global(cx), + std::env::consts::OS, + std::env::consts::ARCH + ); + let proxy_str = ProxySettings::get_global(cx).proxy.to_owned(); + let proxy_url = proxy_str + .as_ref() + .and_then(|input| { + input + .parse::() + .inspect_err(|e| log::error!("Error parsing proxy settings: {}", e)) + .ok() + }) + .or_else(read_proxy_from_env); + let http = IsahcHttpClient::new(proxy_url, Some(user_agent)); + cx.set_http_client(http); ::set_global(fs.clone(), cx); @@ -444,11 +469,9 @@ fn main() { OpenListener::set_global(cx, open_listener.clone()); - settings::init(cx); handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed); handle_keymap_file_changes(user_keymap_file_rx, cx, handle_keymap_changed); - client::init_settings(cx); let client = Client::production(cx); cx.set_http_client(client.http_client().clone()); let mut languages = LanguageRegistry::new(cx.background_executor().clone()); diff --git a/script/bundle-linux b/script/bundle-linux index 029d748f4f10bc..deecd0984bece3 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ 
-43,7 +43,10 @@ script/generate-licenses # Build binary in release mode export RUSTFLAGS="${RUSTFLAGS:-} -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib" -cargo build --release --target "${target_triple}" --package zed --package cli --package remote_server +cargo build --release --target "${target_triple}" --package zed --package cli +# Build remote_server in separate invocation to prevent feature unification from other crates +# from influencing dynamic libraries required by it. +cargo build --release --target "${target_triple}" --package remote_server # Strip the binary of all debug symbols # Later, we probably want to do something like this: https://github.com/GabrielMajeri/separate-symbols @@ -51,6 +54,10 @@ strip --strip-debug "${target_dir}/${target_triple}/release/zed" strip --strip-debug "${target_dir}/${target_triple}/release/cli" strip --strip-debug "${target_dir}/${target_triple}/release/remote_server" + +# Ensure that remote_server does not depend on libssl nor libcrypto, as we got rid of these deps. +ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl' + suffix="" if [ "$channel" != "stable" ]; then suffix="-$channel" From a62e8f6396bf41176ddd00cbc705b699d71fe6cf Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 18:05:30 -0400 Subject: [PATCH 195/762] ci: Explicitly set cache-provider for swatinem/rust-cache (#18034) - Switches the Cache Dependencies step (`swatinem/rust-cache`) of Linux tests to use buildjet as `cache-provider`. Explicitly add 'github' (the default cache provider) to other uses of `swatinem/rust-cache` for consistency. 
Release Notes: - N/A --- .github/workflows/ci.yml | 2 ++ .github/workflows/publish_extension_cli.yml | 1 + 2 files changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c55a3a9907f47f..f059b470040129 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -115,6 +115,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "buildjet" - name: Install Linux dependencies run: ./script/linux @@ -143,6 +144,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "github" - name: cargo clippy # Windows can't run shell scripts, so we need to use `cargo xtask`. diff --git a/.github/workflows/publish_extension_cli.yml b/.github/workflows/publish_extension_cli.yml index 698a09ad007f88..7c47ec5dedfdd1 100644 --- a/.github/workflows/publish_extension_cli.yml +++ b/.github/workflows/publish_extension_cli.yml @@ -24,6 +24,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "github" - name: Configure linux shell: bash -euxo pipefail {0} From 2cd9a88f53954051f639b120940c06d7bebcf250 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 19:39:15 -0400 Subject: [PATCH 196/762] Clean up after `isahc_http_client` introduction (#18045) This PR does some clean up after #15446. 
Release Notes: - N/A --- Cargo.toml | 5 +---- crates/collab/Cargo.toml | 2 +- crates/collab/src/rpc.rs | 1 - crates/extension/src/extension_store.rs | 4 ++++ crates/isahc_http_client/Cargo.toml | 4 ++-- crates/zed/Cargo.toml | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 2071fdcb6ff484..c72fec020fe678 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -174,9 +174,6 @@ members = [ default-members = ["crates/zed"] [workspace.dependencies] - - - # # Workspace member crates # @@ -216,7 +213,6 @@ file_icons = { path = "crates/file_icons" } fs = { path = "crates/fs" } fsevent = { path = "crates/fsevent" } fuzzy = { path = "crates/fuzzy" } -isahc_http_client = { path = "crates/isahc_http_client" } git = { path = "crates/git" } git_hosting_providers = { path = "crates/git_hosting_providers" } go_to_line = { path = "crates/go_to_line" } @@ -231,6 +227,7 @@ image_viewer = { path = "crates/image_viewer" } indexed_docs = { path = "crates/indexed_docs" } inline_completion_button = { path = "crates/inline_completion_button" } install_cli = { path = "crates/install_cli" } +isahc_http_client = { path = "crates/isahc_http_client" } journal = { path = "crates/journal" } language = { path = "crates/language" } language_model = { path = "crates/language_model" } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 296809158dd14e..ad43d2d1f0cf50 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -36,8 +36,8 @@ envy = "0.4.2" futures.workspace = true google_ai.workspace = true hex.workspace = true -isahc_http_client.workspace = true http_client.workspace = true +isahc_http_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true log.workspace = true diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index b2a694027a6f8b..bc0f827e78ba51 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -956,7 +956,6 @@ impl Server { 
tracing::info!("connection opened"); - let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)), diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index bd416f4029f496..8dbd618a25784b 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -827,6 +827,7 @@ impl ExtensionStore { let mut extension_manifest = ExtensionManifest::load(fs.clone(), &extension_source_path).await?; let extension_id = extension_manifest.id.clone(); + if !this.update(&mut cx, |this, cx| { match this.outstanding_operations.entry(extension_id.clone()) { btree_map::Entry::Occupied(_) => return false, @@ -850,6 +851,7 @@ impl ExtensionStore { .ok(); } }); + cx.background_executor() .spawn({ let extension_source_path = extension_source_path.clone(); @@ -880,8 +882,10 @@ impl ExtensionStore { bail!("extension {extension_id} is already installed"); } } + fs.create_symlink(output_path, extension_source_path) .await?; + this.update(&mut cx, |this, cx| this.reload(None, cx))? 
.await; Ok(()) diff --git a/crates/isahc_http_client/Cargo.toml b/crates/isahc_http_client/Cargo.toml index b90163ef7495d5..82f7621bf8cace 100644 --- a/crates/isahc_http_client/Cargo.toml +++ b/crates/isahc_http_client/Cargo.toml @@ -15,8 +15,8 @@ test-support = [] path = "src/isahc_http_client.rs" [dependencies] +anyhow.workspace = true +futures.workspace = true http_client.workspace = true isahc.workspace = true -futures.workspace = true -anyhow.workspace = true util.workspace = true diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 7fa9602a141073..645d12fc76a350 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -47,7 +47,6 @@ file_finder.workspace = true file_icons.workspace = true fs.workspace = true futures.workspace = true -isahc_http_client.workspace = true git.workspace = true git_hosting_providers.workspace = true go_to_line.workspace = true @@ -58,6 +57,7 @@ image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true isahc.workspace = true +isahc_http_client.workspace = true journal.workspace = true language.workspace = true language_model.workspace = true From 106ca5076fd8d485a9016fa202d618efb66e40dc Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 18 Sep 2024 16:43:59 -0700 Subject: [PATCH 197/762] Fix leak of LMDB connection in semantic index (#17992) Apparently, to close LMDB's file descriptors when using the `heed` library, you need to explicitly call `prepare_for_closing`. 
Release Notes: - N/A --------- Co-authored-by: Richard Feldman Co-authored-by: Jason --- crates/evals/src/eval.rs | 9 +++++++++ crates/semantic_index/src/semantic_index.rs | 12 +++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 751dcd09aa1f5c..708cfa7511a402 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -446,6 +446,15 @@ async fn run_evaluation( println!("{}", serde_json::to_string(&query_results).unwrap()); } + + user_store + .update(cx, |_, _| { + drop(semantic_index); + drop(project); + drop(worktree); + drop(project_index); + }) + .unwrap(); } eprint!( diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index 3435d0a9ca4f79..6c97ece024c7fe 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -25,7 +25,7 @@ pub use summary_index::FileSummary; pub struct SemanticDb { embedding_provider: Arc, - db_connection: heed::Env, + db_connection: Option, project_indices: HashMap, Model>, } @@ -70,7 +70,7 @@ impl SemanticDb { .ok(); Ok(SemanticDb { - db_connection, + db_connection: Some(db_connection), embedding_provider, project_indices: HashMap::default(), }) @@ -148,7 +148,7 @@ impl SemanticDb { let project_index = cx.new_model(|cx| { ProjectIndex::new( project.clone(), - self.db_connection.clone(), + self.db_connection.clone().unwrap(), self.embedding_provider.clone(), cx, ) @@ -171,6 +171,12 @@ impl SemanticDb { } } +impl Drop for SemanticDb { + fn drop(&mut self) { + self.db_connection.take().unwrap().prepare_for_closing(); + } +} + #[cfg(test)] mod tests { use super::*; From eef44aff7f9b17f1ea38cbc64ac52bbbd435ef10 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 19:48:34 -0400 Subject: [PATCH 198/762] extension: Re-enable `test_extension_store_with_test_extension` test (#18046) The `test_extension_store_with_test_extension` test was 
disabled in #15446, which got merged before re-enabling the test. This PR re-enables that test. Release Notes: - N/A --- crates/extension/src/extension_store_test.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 0fbd00e0b4ca8a..4bdafaa32c2aff 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -457,8 +457,6 @@ async fn test_extension_store(cx: &mut TestAppContext) { }); } -// TODO remove -#[ignore] #[gpui::test] async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { init_test(cx); @@ -564,10 +562,6 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice())); encoder.read_to_end(&mut gzipped_bytes).await.unwrap(); Ok(Response::new(gzipped_bytes.into())) - // } else if uri == WASI_ADAPTER_URL { - // let binary_contents = - // include_bytes!("wasi_snapshot_preview1.reactor.wasm").as_slice(); - // Ok(Response::new(binary_contents.into())) } else { Ok(Response::builder().status(404).body("not found".into())?) 
} From b43b800a54919103062e8fd7f5ff82c80026f211 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 18 Sep 2024 18:07:39 -0600 Subject: [PATCH 199/762] More assistant events (#18032) Release Notes: - N/A --- crates/assistant/src/context.rs | 3 +- crates/assistant/src/inline_assistant.rs | 29 +++++++++++++++++++ .../src/terminal_inline_assistant.rs | 1 + crates/client/src/telemetry.rs | 8 +++-- crates/collab/src/api/events.rs | 2 ++ .../telemetry_events/src/telemetry_events.rs | 28 +++++++++++++++++- 6 files changed, 66 insertions(+), 5 deletions(-) diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index d55b1aee088642..d72b04e3cddb17 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -46,7 +46,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use telemetry_events::AssistantKind; +use telemetry_events::{AssistantKind, AssistantPhase}; use text::BufferSnapshot; use util::{post_inc, ResultExt, TryFutureExt}; use uuid::Uuid; @@ -2134,6 +2134,7 @@ impl Context { telemetry.report_assistant_event( Some(this.id.0.clone()), AssistantKind::Panel, + AssistantPhase::Response, model.telemetry_id(), response_latency, error_message, diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index eb1bc1eee8b0da..c9360213ae5138 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -174,6 +174,18 @@ impl InlineAssistant { initial_prompt: Option, cx: &mut WindowContext, ) { + if let Some(telemetry) = self.telemetry.as_ref() { + if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { + telemetry.report_assistant_event( + None, + telemetry_events::AssistantKind::Inline, + telemetry_events::AssistantPhase::Invoked, + model.telemetry_id(), + None, + None, + ); + } + } let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); let mut selections = Vec::>::new(); @@ -708,6 +720,22 @@ impl InlineAssistant 
{ } pub fn finish_assist(&mut self, assist_id: InlineAssistId, undo: bool, cx: &mut WindowContext) { + if let Some(telemetry) = self.telemetry.as_ref() { + if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { + telemetry.report_assistant_event( + None, + telemetry_events::AssistantKind::Inline, + if undo { + telemetry_events::AssistantPhase::Rejected + } else { + telemetry_events::AssistantPhase::Accepted + }, + model.telemetry_id(), + None, + None, + ); + } + } if let Some(assist) = self.assists.get(&assist_id) { let assist_group_id = assist.group_id; if self.assist_groups[&assist_group_id].linked { @@ -2558,6 +2586,7 @@ impl Codegen { telemetry.report_assistant_event( None, telemetry_events::AssistantKind::Inline, + telemetry_events::AssistantPhase::Response, model_telemetry_id, response_latency, error_message, diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 06661944d96a79..caf819bae535ee 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -1066,6 +1066,7 @@ impl Codegen { telemetry.report_assistant_event( None, telemetry_events::AssistantKind::Inline, + telemetry_events::AssistantPhase::Response, model_telemetry_id, response_latency, error_message, diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index b415cae14c60a2..46304819a4db5b 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -16,9 +16,9 @@ use std::io::Write; use std::{env, mem, path::PathBuf, sync::Arc, time::Duration}; use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System}; use telemetry_events::{ - ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CpuEvent, EditEvent, - EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, - MemoryEvent, ReplEvent, SettingEvent, + ActionEvent, AppEvent, AssistantEvent, 
AssistantKind, AssistantPhase, CallEvent, CpuEvent, + EditEvent, EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, + InlineCompletionEvent, MemoryEvent, ReplEvent, SettingEvent, }; use tempfile::NamedTempFile; #[cfg(not(debug_assertions))] @@ -391,6 +391,7 @@ impl Telemetry { self: &Arc, conversation_id: Option, kind: AssistantKind, + phase: AssistantPhase, model: String, response_latency: Option, error_message: Option, @@ -398,6 +399,7 @@ impl Telemetry { let event = Event::Assistant(AssistantEvent { conversation_id, kind, + phase, model: model.to_string(), response_latency, error_message, diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 30ed10a76fb0c0..45c25d261e1de8 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -834,6 +834,7 @@ pub struct AssistantEventRow { // AssistantEventRow conversation_id: String, kind: String, + phase: String, model: String, response_latency_in_ms: Option, error_message: Option, @@ -866,6 +867,7 @@ impl AssistantEventRow { time: time.timestamp_millis(), conversation_id: event.conversation_id.unwrap_or_default(), kind: event.kind.to_string(), + phase: event.phase.to_string(), model: event.model, response_latency_in_ms: event .response_latency diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index 87ecfb76b6f7e7..eb84322e83d94c 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -44,7 +44,6 @@ pub enum AssistantKind { Panel, Inline, } - impl Display for AssistantKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( @@ -58,6 +57,31 @@ impl Display for AssistantKind { } } +#[derive(Default, Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum AssistantPhase { + #[default] + Response, + Invoked, + Accepted, + Rejected, +} + +impl Display for 
AssistantPhase { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + match self { + Self::Response => "response", + Self::Invoked => "invoked", + Self::Accepted => "accepted", + Self::Rejected => "rejected", + } + ) + } +} + #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(tag = "type")] pub enum Event { @@ -121,6 +145,8 @@ pub struct AssistantEvent { pub conversation_id: Option, /// The kind of assistant (Panel, Inline) pub kind: AssistantKind, + #[serde(default)] + pub phase: AssistantPhase, /// Name of the AI model used (gpt-4o, claude-3-5-sonnet, etc) pub model: String, pub response_latency: Option, From 43e005e936e13947ed99799375bcbfa35703b8cd Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 19 Sep 2024 02:19:58 +0200 Subject: [PATCH 200/762] chore: Remove commented out code following 15446 (#18047) Closes #ISSUE Release Notes: - N/A --- crates/ollama/src/ollama.rs | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index 972520e61f53e5..51c48290488147 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -343,17 +343,7 @@ pub async fn preload_model(client: Arc, api_url: &str, model: &s }), )?))?; - let mut response = match client.send(request).await { - Ok(response) => response, - Err(error) => { - // Be ok with a timeout during preload of the model - // if err.is_timeout() { - // return Ok(()); - // } else { - return Err(error); - //} - } - }; + let mut response = client.send(request).await?; if response.status().is_success() { Ok(()) From c3f47b8040a83b6414b8a28399628370fb7224f4 Mon Sep 17 00:00:00 2001 From: hekmyr <163496286+hekmyr@users.noreply.github.com> Date: Thu, 19 Sep 2024 02:28:31 +0200 Subject: [PATCH 201/762] vim: Fix increment/decrement command (#17644) Improving vim increment and decrement command. 
Closes: #16672 ## Release Notes: - vim: Improved edge-case handling for ctrl-a/ctrl-x --------- Co-authored-by: Conrad Irwin --- crates/vim/src/normal/increment.rs | 241 ++++++++++++++++-- ...st_increment_bin_wrapping_and_padding.json | 10 + .../test_data/test_increment_hex_casing.json | 5 + ...st_increment_hex_wrapping_and_padding.json | 10 + .../vim/test_data/test_increment_inline.json | 10 + .../test_data/test_increment_sign_change.json | 6 + .../test_data/test_increment_wrapping.json | 13 + 7 files changed, 273 insertions(+), 22 deletions(-) create mode 100644 crates/vim/test_data/test_increment_bin_wrapping_and_padding.json create mode 100644 crates/vim/test_data/test_increment_hex_casing.json create mode 100644 crates/vim/test_data/test_increment_hex_wrapping_and_padding.json create mode 100644 crates/vim/test_data/test_increment_inline.json create mode 100644 crates/vim/test_data/test_increment_sign_change.json create mode 100644 crates/vim/test_data/test_increment_wrapping.json diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 8786eae872bbed..6d66e380c30b80 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -28,18 +28,18 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.record_current_action(cx); let count = vim.take_count(cx).unwrap_or(1); let step = if action.step { 1 } else { 0 }; - vim.increment(count as i32, step, cx) + vim.increment(count as i64, step, cx) }); Vim::action(editor, cx, |vim, action: &Decrement, cx| { vim.record_current_action(cx); let count = vim.take_count(cx).unwrap_or(1); let step = if action.step { -1 } else { 0 }; - vim.increment(-(count as i32), step, cx) + vim.increment(-(count as i64), step, cx) }); } impl Vim { - fn increment(&mut self, mut delta: i32, step: i32, cx: &mut ViewContext) { + fn increment(&mut self, mut delta: i64, step: i32, cx: &mut ViewContext) { self.store_visual_marks(cx); self.update_editor(cx, |vim, editor, 
cx| { let mut edits = Vec::new(); @@ -60,23 +60,14 @@ impl Vim { }; if let Some((range, num, radix)) = find_number(&snapshot, start) { - if let Ok(val) = i32::from_str_radix(&num, radix) { - let result = val + delta; - delta += step; - let replace = match radix { - 10 => format!("{}", result), - 16 => { - if num.to_ascii_lowercase() == num { - format!("{:x}", result) - } else { - format!("{:X}", result) - } - } - 2 => format!("{:b}", result), - _ => unreachable!(), - }; - edits.push((range.clone(), replace)); - } + let replace = match radix { + 10 => increment_decimal_string(&num, delta), + 16 => increment_hex_string(&num, delta), + 2 => increment_binary_string(&num, delta), + _ => unreachable!(), + }; + delta += step as i64; + edits.push((range.clone(), replace)); if selection.is_empty() { new_anchors.push((false, snapshot.anchor_after(range.end))) } @@ -107,6 +98,70 @@ impl Vim { } } +fn increment_decimal_string(mut num: &str, mut delta: i64) -> String { + let mut negative = false; + if num.chars().next() == Some('-') { + negative = true; + delta = 0 - delta; + num = &num[1..]; + } + let result = if let Ok(value) = u64::from_str_radix(num, 10) { + let wrapped = value.wrapping_add_signed(delta); + if delta < 0 && wrapped > value { + negative = !negative; + (u64::MAX - wrapped).wrapping_add(1) + } else if delta > 0 && wrapped < value { + negative = !negative; + u64::MAX - wrapped + } else { + wrapped + } + } else { + u64::MAX + }; + + if result == 0 || !negative { + format!("{}", result) + } else { + format!("-{}", result) + } +} + +fn increment_hex_string(num: &str, delta: i64) -> String { + let result = if let Ok(val) = u64::from_str_radix(&num, 16) { + val.wrapping_add_signed(delta) + } else { + u64::MAX + }; + if should_use_lowercase(num) { + format!("{:0width$x}", result, width = num.len()) + } else { + format!("{:0width$X}", result, width = num.len()) + } +} + +fn should_use_lowercase(num: &str) -> bool { + let mut use_uppercase = false; + for ch in 
num.chars() { + if ch.is_ascii_lowercase() { + return true; + } + if ch.is_ascii_uppercase() { + use_uppercase = true; + } + } + !use_uppercase +} + +fn increment_binary_string(num: &str, delta: i64) -> String { + let result = if let Ok(val) = u64::from_str_radix(&num, 2) { + val.wrapping_add_signed(delta) + } else { + u64::MAX + }; + format!("{:0width$b}", result, width = num.len()) +} + fn find_number( snapshot: &MultiBufferSnapshot, start: Point, @@ -114,10 +169,10 @@ fn find_number( let mut offset = start.to_offset(snapshot); let ch0 = snapshot.chars_at(offset).next(); - if ch0.as_ref().is_some_and(char::is_ascii_digit) || matches!(ch0, Some('-' | 'b' | 'x')) { + if ch0.as_ref().is_some_and(char::is_ascii_hexdigit) || matches!(ch0, Some('-' | 'b' | 'x')) { // go backwards to the start of any number the selection is within for ch in snapshot.reversed_chars_at(offset) { - if ch.is_ascii_digit() || ch == '-' || ch == 'b' || ch == 'x' { + if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' { offset -= ch.len_utf8(); continue; } @@ -158,6 +213,8 @@ fn find_number( begin = Some(offset); } num.push(ch); + println!("pushing {}", ch); + println!(); } else if begin.is_some() { end = Some(offset); break; @@ -250,6 +307,146 @@ mod test { "}); } + #[gpui::test] + async fn test_increment_sign_change(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + ˇ0 + "}) + .await; + cx.simulate_shared_keystrokes("ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + -ˇ1 + "}); + cx.simulate_shared_keystrokes("2 ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + ˇ1 + "}); + } + + #[gpui::test] + async fn test_increment_bin_wrapping_and_padding(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! 
{" + 0b111111111111111111111111111111111111111111111111111111111111111111111ˇ1 + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000000000000000000000000000000000000000000000000000000000000000000ˇ0 + "}); + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000000000000000000000000000000000000000000000000000000000000000000ˇ1 + "}); + cx.simulate_shared_keystrokes("2 ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1 + "}); + } + + #[gpui::test] + async fn test_increment_hex_wrapping_and_padding(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + 0xfffffffffffffffffffˇf + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000fffffffffffffffˇf + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000000000000000000ˇ0 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000000000000000000ˇ1 + "}); + cx.simulate_shared_keystrokes("2 ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000fffffffffffffffˇf + "}); + } + + #[gpui::test] + async fn test_increment_wrapping(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + 1844674407370955161ˇ9 + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! 
{" + 1844674407370955161ˇ5 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + -1844674407370955161ˇ5 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + -1844674407370955161ˇ4 + "}); + cx.simulate_shared_keystrokes("3 ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + 1844674407370955161ˇ4 + "}); + cx.simulate_shared_keystrokes("2 ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + -1844674407370955161ˇ5 + "}); + } + + #[gpui::test] + async fn test_increment_inline(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + inline0x3ˇ9u32 + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + inline0x3ˇau32 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + inline0x3ˇbu32 + "}); + cx.simulate_shared_keystrokes("l l l ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + inline0x3bu3ˇ3 + "}); + } + + #[gpui::test] + async fn test_increment_hex_casing(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + 0xFˇa + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0xfˇb + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! 
{" + 0xfˇc + "}); + } + #[gpui::test] async fn test_increment_radix(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json b/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json new file mode 100644 index 00000000000000..4f1a6aa1d364b7 --- /dev/null +++ b/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json @@ -0,0 +1,10 @@ +{"Put":{"state":"0b111111111111111111111111111111111111111111111111111111111111111111111ˇ1\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ0\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ1\n","mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-x"} +{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_hex_casing.json b/crates/vim/test_data/test_increment_hex_casing.json new file mode 100644 index 00000000000000..951906fa255578 --- /dev/null +++ b/crates/vim/test_data/test_increment_hex_casing.json @@ -0,0 +1,5 @@ +{"Put":{"state":"0xFˇa\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0xfˇb\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0xfˇc\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json b/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json new file mode 100644 index 00000000000000..23a561126487c6 --- /dev/null +++ b/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json @@ -0,0 +1,10 @@ +{"Put":{"state":"0xfffffffffffffffffffˇf\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}} +{"Key":"ctrl-a"} 
+{"Get":{"state":"0x0000000000000000000ˇ0\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0x0000000000000000000ˇ1\n","mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-x"} +{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_inline.json b/crates/vim/test_data/test_increment_inline.json new file mode 100644 index 00000000000000..98c4fc28052724 --- /dev/null +++ b/crates/vim/test_data/test_increment_inline.json @@ -0,0 +1,10 @@ +{"Put":{"state":"inline0x3ˇ9u32\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"inline0x3ˇau32\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"inline0x3ˇbu32\n", "mode":"Normal"}} +{"Key":"l"} +{"Key":"l"} +{"Key":"l"} +{"Key":"ctrl-a"} +{"Get":{"state":"inline0x3bu3ˇ3\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_sign_change.json b/crates/vim/test_data/test_increment_sign_change.json new file mode 100644 index 00000000000000..1f4edd57b456af --- /dev/null +++ b/crates/vim/test_data/test_increment_sign_change.json @@ -0,0 +1,6 @@ +{"Put":{"state":"ˇ0\n"}} +{"Key":"ctrl-x"} +{"Get":{"state":"-ˇ1\n","mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-a"} +{"Get":{"state":"ˇ1\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_wrapping.json b/crates/vim/test_data/test_increment_wrapping.json new file mode 100644 index 00000000000000..9f84c8cb1145d4 --- /dev/null +++ b/crates/vim/test_data/test_increment_wrapping.json @@ -0,0 +1,13 @@ +{"Put":{"state":"1844674407370955161ˇ9\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"1844674407370955161ˇ5\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"-1844674407370955161ˇ4\n", "mode":"Normal"}} +{"Key":"3"} +{"Key":"ctrl-x"} +{"Get":{"state":"1844674407370955161ˇ4\n", "mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-a"} +{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}} From 1b612108bae7e4c7ac194e5803b4144fbc218df6 Mon 
Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 19 Sep 2024 11:40:01 +0200 Subject: [PATCH 202/762] linux: Fix invalid check for denylisted dependencies (#18050) Closes #ISSUE Release Notes: - N/A --- script/bundle-linux | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/bundle-linux b/script/bundle-linux index deecd0984bece3..c519f3b9abf0da 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -56,7 +56,7 @@ strip --strip-debug "${target_dir}/${target_triple}/release/remote_server" # Ensure that remote_server does not depend on libssl nor libcrypto, as we got rid of these deps. -ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl' +! ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl' suffix="" if [ "$channel" != "stable" ]; then From 5e6d1814e5c69d4e2e50d01744f5effe2b92ce70 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 19 Sep 2024 12:22:10 +0200 Subject: [PATCH 203/762] Add stray UI tweaks on the task picker (#18059) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds tiny UI tweaks to the task picker. Just making sure it is consistent with other pickers throughout Zed. 
| Before | After | |--------|--------| | Screenshot 2024-09-19 at 12 07 44 PM | Screenshot 2024-09-19 at 12 07 09 PM | Release Notes: - N/A --- crates/tasks_ui/src/modal.rs | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 1255d3a94e392d..931a0b09c365fb 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -410,7 +410,7 @@ impl PickerDelegate for TasksModalDelegate { Some( ListItem::new(SharedString::from(format!("tasks-modal-{ix}"))) - .inset(false) + .inset(true) .start_slot::(icon) .end_slot::(history_run_icon) .spacing(ListItemSpacing::Sparse) @@ -448,7 +448,7 @@ impl PickerDelegate for TasksModalDelegate { picker.refresh(cx); })) .tooltip(|cx| { - Tooltip::text("Delete previously scheduled task", cx) + Tooltip::text("Delete Previously Scheduled Task", cx) }), ); item.end_hover_slot(delete_button) @@ -499,7 +499,7 @@ impl PickerDelegate for TasksModalDelegate { .last_scheduled_task(None) .is_some() { - Some(("Rerun last task", Rerun::default().boxed_clone())) + Some(("Rerun Last Task", Rerun::default().boxed_clone())) } else { None }; @@ -511,6 +511,8 @@ impl PickerDelegate for TasksModalDelegate { .justify_between() .rounded_b_md() .bg(cx.theme().colors().ghost_element_selected) + .border_t_1() + .border_color(cx.theme().colors().border_variant) .child( left_button .map(|(label, action)| { @@ -535,9 +537,9 @@ impl PickerDelegate for TasksModalDelegate { .boxed_clone(); this.children(KeyBinding::for_action(&*action, cx).map(|keybind| { let spawn_oneshot_label = if current_modifiers.secondary() { - "Spawn oneshot without history" + "Spawn Oneshot Without History" } else { - "Spawn oneshot" + "Spawn Oneshot" }; Button::new("spawn-onehshot", spawn_oneshot_label) @@ -549,9 +551,9 @@ impl PickerDelegate for TasksModalDelegate { this.children(KeyBinding::for_action(&menu::SecondaryConfirm, cx).map( |keybind| { let label = if is_recent_selected 
{ - "Rerun without history" + "Rerun Without History" } else { - "Spawn without history" + "Spawn Without History" }; Button::new("spawn", label) .label_size(LabelSize::Small) From ca4980df02aa2618ebcb3969963c6fdc8ac23fd7 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Thu, 19 Sep 2024 07:20:27 -0400 Subject: [PATCH 204/762] Add system_id (#18040) This PR adds `system_id` to telemetry, which is contained within a new `global` database (accessible by any release channel of Zed on a single system). This will help us get a more accurate understanding of user count, instead of relying on `installationd_id`, which is different per release channel. This doesn't solve the problem of a user with multiple machines, but it gets us closer. Release Notes: - N/A --- crates/client/src/telemetry.rs | 17 ++-- crates/collab/src/api/events.rs | 25 ++++-- crates/db/src/db.rs | 68 ++++++++------ crates/db/src/kvp.rs | 30 +++++++ crates/feedback/src/feedback_modal.rs | 6 +- .../telemetry_events/src/telemetry_events.rs | 8 +- crates/zed/src/main.rs | 88 ++++++++++++++----- crates/zed/src/reliability.rs | 4 +- 8 files changed, 184 insertions(+), 62 deletions(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 46304819a4db5b..6c1803df3d02d5 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -37,9 +37,10 @@ pub struct Telemetry { struct TelemetryState { settings: TelemetrySettings, - metrics_id: Option>, // Per logged-in user + system_id: Option>, // Per system installation_id: Option>, // Per app installation (different for dev, nightly, preview, and stable) session_id: Option, // Per app launch + metrics_id: Option>, // Per logged-in user release_channel: Option<&'static str>, architecture: &'static str, events_queue: Vec, @@ -191,9 +192,10 @@ impl Telemetry { settings: *TelemetrySettings::get_global(cx), architecture: env::consts::ARCH, release_channel, + system_id: None, installation_id: None, - metrics_id: None, 
session_id: None, + metrics_id: None, events_queue: Vec::new(), flush_events_task: None, log_file: None, @@ -283,11 +285,13 @@ impl Telemetry { pub fn start( self: &Arc, + system_id: Option, installation_id: Option, session_id: String, cx: &mut AppContext, ) { let mut state = self.state.lock(); + state.system_id = system_id.map(|id| id.into()); state.installation_id = installation_id.map(|id| id.into()); state.session_id = Some(session_id); state.app_version = release_channel::AppVersion::global(cx).to_string(); @@ -637,9 +641,10 @@ impl Telemetry { let state = this.state.lock(); let request_body = EventRequestBody { + system_id: state.system_id.as_deref().map(Into::into), installation_id: state.installation_id.as_deref().map(Into::into), - metrics_id: state.metrics_id.as_deref().map(Into::into), session_id: state.session_id.clone(), + metrics_id: state.metrics_id.as_deref().map(Into::into), is_staff: state.is_staff, app_version: state.app_version.clone(), os_name: state.os_name.clone(), @@ -711,6 +716,7 @@ mod tests { Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), )); let http = FakeHttpClient::with_200_response(); + let system_id = Some("system_id".to_string()); let installation_id = Some("installation_id".to_string()); let session_id = "session_id".to_string(); @@ -718,7 +724,7 @@ mod tests { let telemetry = Telemetry::new(clock.clone(), http, cx); telemetry.state.lock().max_queue_size = 4; - telemetry.start(installation_id, session_id, cx); + telemetry.start(system_id, installation_id, session_id, cx); assert!(is_empty_state(&telemetry)); @@ -796,13 +802,14 @@ mod tests { Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), )); let http = FakeHttpClient::with_200_response(); + let system_id = Some("system_id".to_string()); let installation_id = Some("installation_id".to_string()); let session_id = "session_id".to_string(); cx.update(|cx| { let telemetry = Telemetry::new(clock.clone(), http, cx); telemetry.state.lock().max_queue_size = 4; - 
telemetry.start(installation_id, session_id, cx); + telemetry.start(system_id, installation_id, session_id, cx); assert!(is_empty_state(&telemetry)); diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 45c25d261e1de8..1be8f9c37b4d09 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -149,7 +149,8 @@ pub async fn post_crash( installation_id = %installation_id, description = %description, backtrace = %summary, - "crash report"); + "crash report" + ); if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { let payload = slack::WebhookBody::new(|w| { @@ -627,7 +628,9 @@ where #[derive(Serialize, Debug, clickhouse::Row)] pub struct EditorEventRow { + system_id: String, installation_id: String, + session_id: Option, metrics_id: String, operation: String, app_version: String, @@ -647,7 +650,6 @@ pub struct EditorEventRow { historical_event: bool, architecture: String, is_staff: Option, - session_id: Option, major: Option, minor: Option, patch: Option, @@ -677,9 +679,10 @@ impl EditorEventRow { os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), architecture: body.architecture.clone(), + system_id: body.system_id.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), - metrics_id: body.metrics_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), + metrics_id: body.metrics_id.clone().unwrap_or_default(), is_staff: body.is_staff, time: time.timestamp_millis(), operation: event.operation, @@ -699,6 +702,7 @@ impl EditorEventRow { #[derive(Serialize, Debug, clickhouse::Row)] pub struct InlineCompletionEventRow { installation_id: String, + session_id: Option, provider: String, suggestion_accepted: bool, app_version: String, @@ -713,7 +717,6 @@ pub struct InlineCompletionEventRow { city: String, time: i64, is_staff: Option, - session_id: Option, major: Option, minor: Option, patch: Option, @@ -879,7 
+882,9 @@ impl AssistantEventRow { #[derive(Debug, clickhouse::Row, Serialize)] pub struct CpuEventRow { + system_id: Option, installation_id: Option, + session_id: Option, is_staff: Option, usage_as_percentage: f32, core_count: u32, @@ -888,7 +893,6 @@ pub struct CpuEventRow { os_name: String, os_version: String, time: i64, - session_id: Option, // pub normalized_cpu_usage: f64, MATERIALIZED major: Option, minor: Option, @@ -917,6 +921,7 @@ impl CpuEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -940,6 +945,7 @@ pub struct MemoryEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -971,6 +977,7 @@ impl MemoryEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -994,6 +1001,7 @@ pub struct AppEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1024,6 +1032,7 @@ impl AppEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1046,6 +1055,7 @@ pub struct SettingEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1076,6 +1086,7 @@ impl 
SettingEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1099,6 +1110,7 @@ pub struct ExtensionEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1134,6 +1146,7 @@ impl ExtensionEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1224,6 +1237,7 @@ pub struct EditEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, // Note: This column name has a typo in the ClickHouse table. 
#[serde(rename = "sesssion_id")] @@ -1261,6 +1275,7 @@ impl EditEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs index 768f382203020d..4d87222c773ab1 100644 --- a/crates/db/src/db.rs +++ b/crates/db/src/db.rs @@ -11,16 +11,14 @@ pub use smol; pub use sqlez; pub use sqlez_macros; -use release_channel::ReleaseChannel; pub use release_channel::RELEASE_CHANNEL; use sqlez::domain::Migrator; use sqlez::thread_safe_connection::ThreadSafeConnection; use sqlez_macros::sql; -use std::env; use std::future::Future; use std::path::Path; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::LazyLock; +use std::sync::{atomic::Ordering, LazyLock}; +use std::{env, sync::atomic::AtomicBool}; use util::{maybe, ResultExt}; const CONNECTION_INITIALIZE_QUERY: &str = sql!( @@ -47,16 +45,12 @@ pub static ALL_FILE_DB_FAILED: LazyLock = LazyLock::new(|| AtomicBoo /// This will retry a couple times if there are failures. If opening fails once, the db directory /// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created. /// In either case, static variables are set so that the user can be notified. 
-pub async fn open_db( - db_dir: &Path, - release_channel: &ReleaseChannel, -) -> ThreadSafeConnection { +pub async fn open_db(db_dir: &Path, scope: &str) -> ThreadSafeConnection { if *ZED_STATELESS { return open_fallback_db().await; } - let release_channel_name = release_channel.dev_name(); - let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name))); + let main_db_dir = db_dir.join(format!("0-{}", scope)); let connection = maybe!(async { smol::fs::create_dir_all(&main_db_dir) @@ -118,7 +112,7 @@ pub async fn open_test_db(db_name: &str) -> ThreadSafeConnection /// Implements a basic DB wrapper for a given domain #[macro_export] macro_rules! define_connection { - (pub static ref $id:ident: $t:ident<()> = $migrations:expr;) => { + (pub static ref $id:ident: $t:ident<()> = $migrations:expr; $($global:ident)?) => { pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>); impl ::std::ops::Deref for $t { @@ -139,18 +133,23 @@ macro_rules! define_connection { } } - use std::sync::LazyLock; #[cfg(any(test, feature = "test-support"))] - pub static $id: LazyLock<$t> = LazyLock::new(|| { + pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { $t($crate::smol::block_on($crate::open_test_db(stringify!($id)))) }); #[cfg(not(any(test, feature = "test-support")))] - pub static $id: LazyLock<$t> = LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL))) + pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { + let db_dir = $crate::database_dir(); + let scope = if false $(|| stringify!($global) == "global")? { + "global" + } else { + $crate::RELEASE_CHANNEL.dev_name() + }; + $t($crate::smol::block_on($crate::open_db(db_dir, scope))) }); }; - (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr;) => { + (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr; $($global:ident)?) 
=> { pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>); impl ::std::ops::Deref for $t { @@ -178,7 +177,13 @@ macro_rules! define_connection { #[cfg(not(any(test, feature = "test-support")))] pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL))) + let db_dir = $crate::database_dir(); + let scope = if false $(|| stringify!($global) == "global")? { + "global" + } else { + $crate::RELEASE_CHANNEL.dev_name() + }; + $t($crate::smol::block_on($crate::open_db(db_dir, scope))) }); }; } @@ -225,7 +230,11 @@ mod tests { .prefix("DbTests") .tempdir() .unwrap(); - let _bad_db = open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let _bad_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; } /// Test that DB exists but corrupted (causing recreate) @@ -262,13 +271,19 @@ mod tests { .tempdir() .unwrap(); { - let corrupt_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let corrupt_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!(corrupt_db.persistent()); } - let good_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let good_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!( good_db.select_row::("SELECT * FROM test2").unwrap()() .unwrap() @@ -311,8 +326,11 @@ mod tests { .unwrap(); { // Setup the bad database - let corrupt_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let corrupt_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!(corrupt_db.persistent()); } @@ -323,7 +341,7 @@ mod tests { let guard = thread::spawn(move || { let good_db = smol::block_on(open_db::( tmp_path.as_path(), - 
&release_channel::ReleaseChannel::Dev, + &release_channel::ReleaseChannel::Dev.dev_name(), )); assert!( good_db.select_row::("SELECT * FROM test2").unwrap()() diff --git a/crates/db/src/kvp.rs b/crates/db/src/kvp.rs index 0b0cdd9aa11177..c9d994d34da7d1 100644 --- a/crates/db/src/kvp.rs +++ b/crates/db/src/kvp.rs @@ -60,3 +60,33 @@ mod tests { assert_eq!(db.read_kvp("key-1").unwrap(), None); } } + +define_connection!(pub static ref GLOBAL_KEY_VALUE_STORE: GlobalKeyValueStore<()> = + &[sql!( + CREATE TABLE IF NOT EXISTS kv_store( + key TEXT PRIMARY KEY, + value TEXT NOT NULL + ) STRICT; + )]; + global +); + +impl GlobalKeyValueStore { + query! { + pub fn read_kvp(key: &str) -> Result> { + SELECT value FROM kv_store WHERE key = (?) + } + } + + query! { + pub async fn write_kvp(key: String, value: String) -> Result<()> { + INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?)) + } + } + + query! { + pub async fn delete_kvp(key: String) -> Result<()> { + DELETE FROM kv_store WHERE key = (?) 
+ } + } +} diff --git a/crates/feedback/src/feedback_modal.rs b/crates/feedback/src/feedback_modal.rs index 7369bcd8530c45..a4a07ad2ad44d0 100644 --- a/crates/feedback/src/feedback_modal.rs +++ b/crates/feedback/src/feedback_modal.rs @@ -44,8 +44,8 @@ const FEEDBACK_SUBMISSION_ERROR_TEXT: &str = struct FeedbackRequestBody<'a> { feedback_text: &'a str, email: Option, - metrics_id: Option>, installation_id: Option>, + metrics_id: Option>, system_specs: SystemSpecs, is_staff: bool, } @@ -296,16 +296,16 @@ impl FeedbackModal { } let telemetry = zed_client.telemetry(); - let metrics_id = telemetry.metrics_id(); let installation_id = telemetry.installation_id(); + let metrics_id = telemetry.metrics_id(); let is_staff = telemetry.is_staff(); let http_client = zed_client.http_client(); let feedback_endpoint = http_client.build_url("/api/feedback"); let request = FeedbackRequestBody { feedback_text, email, - metrics_id, installation_id, + metrics_id, system_specs, is_staff: is_staff.unwrap_or(false), }; diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index eb84322e83d94c..d6e737b929e1f5 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -5,12 +5,14 @@ use std::{fmt::Display, sync::Arc, time::Duration}; #[derive(Serialize, Deserialize, Debug)] pub struct EventRequestBody { + /// Identifier unique to each system Zed is installed on + pub system_id: Option, /// Identifier unique to each Zed installation (differs for stable, preview, dev) pub installation_id: Option, /// Identifier unique to each logged in Zed user (randomly generated on first sign in) - pub metrics_id: Option, /// Identifier unique to each Zed session (differs for each time you open Zed) pub session_id: Option, + pub metrics_id: Option, /// True for Zed staff, otherwise false pub is_staff: Option, /// Zed version number @@ -34,6 +36,7 @@ pub struct EventWrapper { pub signed_in: 
bool, /// Duration between this event's timestamp and the timestamp of the first event in the current batch pub milliseconds_since_first_event: i64, + /// The event itself #[serde(flatten)] pub event: Event, } @@ -245,8 +248,11 @@ pub struct Panic { pub architecture: String, /// The time the panic occurred (UNIX millisecond timestamp) pub panicked_on: i64, + /// Identifier unique to each system Zed is installed on #[serde(skip_serializing_if = "Option::is_none")] + pub system_id: Option, /// Identifier unique to each Zed installation (differs for stable, preview, dev) + #[serde(skip_serializing_if = "Option::is_none")] pub installation_id: Option, /// Identifier unique to each Zed session (differs for each time you open Zed) pub session_id: String, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index d3a722ec657898..c127a975a95e89 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -13,7 +13,7 @@ use clap::{command, Parser}; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; use client::{parse_zed_link, Client, DevServerToken, ProxySettings, UserStore}; use collab_ui::channel_view::ChannelView; -use db::kvp::KEY_VALUE_STORE; +use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE}; use editor::Editor; use env_logger::Builder; use fs::{Fs, RealFs}; @@ -334,19 +334,17 @@ fn main() { .with_assets(Assets) .with_http_client(IsahcHttpClient::new(None, None)); - let (installation_id, existing_installation_id_found) = app - .background_executor() - .block(installation_id()) - .ok() - .unzip(); - + let system_id = app.background_executor().block(system_id()).ok(); + let installation_id = app.background_executor().block(installation_id()).ok(); + let session_id = Uuid::new_v4().to_string(); let session = app.background_executor().block(Session::new()); - let app_version = AppVersion::init(env!("CARGO_PKG_VERSION")); + reliability::init_panic_hook( - installation_id.clone(), app_version, - session.id().to_owned(), + system_id.as_ref().map(|id| id.to_string()), 
+ installation_id.as_ref().map(|id| id.to_string()), + session_id.clone(), ); let (open_listener, mut open_rx) = OpenListener::new(); @@ -491,14 +489,26 @@ fn main() { client::init(&client, cx); language::init(cx); let telemetry = client.telemetry(); - telemetry.start(installation_id.clone(), session.id().to_owned(), cx); - telemetry.report_app_event( - match existing_installation_id_found { - Some(false) => "first open", - _ => "open", - } - .to_string(), + telemetry.start( + system_id.as_ref().map(|id| id.to_string()), + installation_id.as_ref().map(|id| id.to_string()), + session_id, + cx, ); + if let (Some(system_id), Some(installation_id)) = (&system_id, &installation_id) { + match (&system_id, &installation_id) { + (IdType::New(_), IdType::New(_)) => { + telemetry.report_app_event("first open".to_string()); + telemetry.report_app_event("first open for release channel".to_string()); + } + (IdType::Existing(_), IdType::New(_)) => { + telemetry.report_app_event("first open for release channel".to_string()); + } + (_, IdType::Existing(_)) => { + telemetry.report_app_event("open".to_string()); + } + } + } let app_session = cx.new_model(|cx| AppSession::new(session, cx)); let app_state = Arc::new(AppState { @@ -514,7 +524,11 @@ fn main() { AppState::set_global(Arc::downgrade(&app_state), cx); auto_update::init(client.http_client(), cx); - reliability::init(client.http_client(), installation_id, cx); + reliability::init( + client.http_client(), + installation_id.clone().map(|id| id.to_string()), + cx, + ); let prompt_builder = init_common(app_state.clone(), cx); let args = Args::parse(); @@ -755,7 +769,23 @@ async fn authenticate(client: Arc, cx: &AsyncAppContext) -> Result<()> { Ok::<_, anyhow::Error>(()) } -async fn installation_id() -> Result<(String, bool)> { +async fn system_id() -> Result { + let key_name = "system_id".to_string(); + + if let Ok(Some(system_id)) = GLOBAL_KEY_VALUE_STORE.read_kvp(&key_name) { + return Ok(IdType::Existing(system_id)); + } + + 
let system_id = Uuid::new_v4().to_string(); + + GLOBAL_KEY_VALUE_STORE + .write_kvp(key_name, system_id.clone()) + .await?; + + Ok(IdType::New(system_id)) +} + +async fn installation_id() -> Result { let legacy_key_name = "device_id".to_string(); let key_name = "installation_id".to_string(); @@ -765,11 +795,11 @@ async fn installation_id() -> Result<(String, bool)> { .write_kvp(key_name, installation_id.clone()) .await?; KEY_VALUE_STORE.delete_kvp(legacy_key_name).await?; - return Ok((installation_id, true)); + return Ok(IdType::Existing(installation_id)); } if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(&key_name) { - return Ok((installation_id, true)); + return Ok(IdType::Existing(installation_id)); } let installation_id = Uuid::new_v4().to_string(); @@ -778,7 +808,7 @@ async fn installation_id() -> Result<(String, bool)> { .write_kvp(key_name, installation_id.clone()) .await?; - Ok((installation_id, false)) + Ok(IdType::New(installation_id)) } async fn restore_or_create_workspace( @@ -1087,6 +1117,20 @@ struct Args { dev_server_token: Option, } +#[derive(Clone, Debug)] +enum IdType { + New(String), + Existing(String), +} + +impl ToString for IdType { + fn to_string(&self) -> String { + match self { + IdType::New(id) | IdType::Existing(id) => id.clone(), + } + } +} + fn parse_url_arg(arg: &str, cx: &AppContext) -> Result { match std::fs::canonicalize(Path::new(&arg)) { Ok(path) => Ok(format!( diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 188cf417f7c38b..9e811d7c9afbb4 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -28,8 +28,9 @@ use crate::stdout_is_a_pty; static PANIC_COUNT: AtomicU32 = AtomicU32::new(0); pub fn init_panic_hook( - installation_id: Option, app_version: SemanticVersion, + system_id: Option, + installation_id: Option, session_id: String, ) { let is_pty = stdout_is_a_pty(); @@ -102,6 +103,7 @@ pub fn init_panic_hook( architecture: env::consts::ARCH.into(), 
panicked_on: Utc::now().timestamp_millis(), backtrace, + system_id: system_id.clone(), installation_id: installation_id.clone(), session_id: session_id.clone(), }; From 1723713dc292074e558935b9f5c81eac7938c396 Mon Sep 17 00:00:00 2001 From: thataboy Date: Thu, 19 Sep 2024 04:43:49 -0700 Subject: [PATCH 205/762] Add ability to copy assistant code block to clipboard or insert into editor, without manual selection (#17853) Some notes: - You can put the cursor on the start or end line with triple backticks, it doesn't actually have to be inside the block. - Placing the cursor outside of a code block does nothing. - Code blocks are determined by counting triple backticks pairs from either start or end of buffer, and nothing else. - If you manually select something, the selection takes precedence over any code blocks. Release Notes: - Added the ability to copy surrounding code blocks in the assistant panel into the clipboard, or inserting them directly into the editor, without manually selecting. Place cursor anywhere in a code block (marked by triple backticks) and use the `assistant::CopyCode` action (`cmd-k c` / `ctrl-k c`) to copy to the clipboard, or the `assistant::InsertIntoEditor` action (`cmd-<` / `ctrl-<`) to insert into editor. 
--------- Co-authored-by: Thorsten Ball Co-authored-by: Bennet --- Cargo.lock | 2 + assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 1 + crates/assistant/Cargo.toml | 2 + crates/assistant/src/assistant.rs | 1 + crates/assistant/src/assistant_panel.rs | 218 ++++++++++++++++++++++-- 6 files changed, 207 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 652c584fd53795..a37a5350f55eab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -402,6 +402,7 @@ dependencies = [ "indoc", "language", "language_model", + "languages", "log", "markdown", "menu", @@ -436,6 +437,7 @@ dependencies = [ "text", "theme", "toml 0.8.19", + "tree-sitter-md", "ui", "unindent", "util", diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 02fc6d8e04da8a..542f6c2df42b48 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -166,6 +166,7 @@ { "context": "AssistantPanel", "bindings": { + "ctrl-k c": "assistant::CopyCode", "ctrl-g": "search::SelectNextMatch", "ctrl-shift-g": "search::SelectPrevMatch", "alt-m": "assistant::ToggleModelSelector", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 33536cc9ff9e4b..77fac3254bec17 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -188,6 +188,7 @@ { "context": "AssistantPanel", "bindings": { + "cmd-k c": "assistant::CopyCode", "cmd-g": "search::SelectNextMatch", "cmd-shift-g": "search::SelectPrevMatch", "alt-m": "assistant::ToggleModelSelector", diff --git a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml index b700702062c0a7..9f715d822474d2 100644 --- a/crates/assistant/Cargo.toml +++ b/crates/assistant/Cargo.toml @@ -94,9 +94,11 @@ editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true language = { workspace = true, features = ["test-support"] } language_model = { workspace = true, features = ["test-support"] } +languages = 
{ workspace = true, features = ["test-support"] } log.workspace = true project = { workspace = true, features = ["test-support"] } rand.workspace = true serde_json_lenient.workspace = true text = { workspace = true, features = ["test-support"] } +tree-sitter-md.workspace = true unindent.workspace = true diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index af7f03ebb35264..d7466878c9ce1e 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -58,6 +58,7 @@ actions!( [ Assist, Split, + CopyCode, CycleMessageRole, QuoteSelection, InsertIntoEditor, diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 5d06720fe0095b..094d187df29789 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -12,11 +12,11 @@ use crate::{ slash_command_picker, terminal_inline_assistant::TerminalInlineAssistant, Assist, CacheStatus, ConfirmCommand, Content, Context, ContextEvent, ContextId, ContextStore, - ContextStoreEvent, CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssistId, - InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId, MessageMetadata, - MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, PendingSlashCommand, - PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, SavedContextMetadata, Split, - ToggleFocus, ToggleModelSelector, WorkflowStepResolution, + ContextStoreEvent, CopyCode, CycleMessageRole, DeployHistory, DeployPromptLibrary, + InlineAssistId, InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId, + MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, + PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, + SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector, WorkflowStepResolution, }; use anyhow::{anyhow, Result}; use assistant_slash_command::{SlashCommand, 
SlashCommandOutputSection}; @@ -45,7 +45,8 @@ use gpui::{ }; use indexed_docs::IndexedDocsStore; use language::{ - language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, ToOffset, + language_settings::SoftWrap, BufferSnapshot, Capability, LanguageRegistry, LspAdapterDelegate, + ToOffset, }; use language_model::{ provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId, @@ -56,6 +57,7 @@ use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; use project::lsp_store::LocalLspAdapterDelegate; use project::{Project, Worktree}; +use rope::Point; use search::{buffer_search::DivRegistrar, BufferSearchBar}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings}; @@ -81,9 +83,10 @@ use util::{maybe, ResultExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, item::{self, FollowableItem, Item, ItemHandle}, + notifications::NotificationId, pane::{self, SaveIntent}, searchable::{SearchEvent, SearchableItem}, - DraggedSelection, Pane, Save, ShowConfiguration, ToggleZoom, ToolbarItemEvent, + DraggedSelection, Pane, Save, ShowConfiguration, Toast, ToggleZoom, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, }; use workspace::{searchable::SearchableItemHandle, DraggedTab}; @@ -105,6 +108,7 @@ pub fn init(cx: &mut AppContext) { .register_action(AssistantPanel::inline_assist) .register_action(ContextEditor::quote_selection) .register_action(ContextEditor::insert_selection) + .register_action(ContextEditor::copy_code) .register_action(ContextEditor::insert_dragged_files) .register_action(AssistantPanel::show_configuration) .register_action(AssistantPanel::create_new_context); @@ -3100,6 +3104,40 @@ impl ContextEditor { }); } + /// Returns either the selected text, or the content of the Markdown code + /// block surrounding the cursor. 
+ fn get_selection_or_code_block( + context_editor_view: &View, + cx: &mut ViewContext, + ) -> Option<(String, bool)> { + let context_editor = context_editor_view.read(cx).editor.read(cx); + + if context_editor.selections.newest::(cx).is_empty() { + let snapshot = context_editor.buffer().read(cx).snapshot(cx); + let (_, _, snapshot) = snapshot.as_singleton()?; + + let head = context_editor.selections.newest::(cx).head(); + let offset = snapshot.point_to_offset(head); + + let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; + let text = snapshot + .text_for_range(surrounding_code_block_range) + .collect::(); + + (!text.is_empty()).then_some((text, true)) + } else { + let anchor = context_editor.selections.newest_anchor(); + let text = context_editor + .buffer() + .read(cx) + .read(cx) + .text_for_range(anchor.range()) + .collect::(); + + (!text.is_empty()).then_some((text, false)) + } + } + fn insert_selection( workspace: &mut Workspace, _: &InsertIntoEditor, @@ -3118,17 +3156,7 @@ impl ContextEditor { return; }; - let context_editor = context_editor_view.read(cx).editor.read(cx); - let anchor = context_editor.selections.newest_anchor(); - let text = context_editor - .buffer() - .read(cx) - .read(cx) - .text_for_range(anchor.range()) - .collect::(); - - // If nothing is selected, don't delete the current selection; instead, be a no-op. 
- if !text.is_empty() { + if let Some((text, _)) = Self::get_selection_or_code_block(&context_editor_view, cx) { active_editor_view.update(cx, |editor, cx| { editor.insert(&text, cx); editor.focus(cx); @@ -3136,6 +3164,36 @@ impl ContextEditor { } } + fn copy_code(workspace: &mut Workspace, _: &CopyCode, cx: &mut ViewContext) { + let result = maybe!({ + let panel = workspace.panel::(cx)?; + let context_editor_view = panel.read(cx).active_context_editor(cx)?; + Self::get_selection_or_code_block(&context_editor_view, cx) + }); + let Some((text, is_code_block)) = result else { + return; + }; + + cx.write_to_clipboard(ClipboardItem::new_string(text)); + + struct CopyToClipboardToast; + workspace.show_toast( + Toast::new( + NotificationId::unique::(), + format!( + "{} copied to clipboard.", + if is_code_block { + "Code block" + } else { + "Selection" + } + ), + ) + .autohide(), + cx, + ); + } + fn insert_dragged_files( workspace: &mut Workspace, action: &InsertDraggedFiles, @@ -4215,6 +4273,48 @@ impl ContextEditor { } } +/// Returns the contents of the *outermost* fenced code block that contains the given offset. +fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Option> { + const CODE_BLOCK_NODE: &'static str = "fenced_code_block"; + const CODE_BLOCK_CONTENT: &'static str = "code_fence_content"; + + let layer = snapshot.syntax_layers().next()?; + + let root_node = layer.node(); + let mut cursor = root_node.walk(); + + // Go to the first child for the given offset + while cursor.goto_first_child_for_byte(offset).is_some() { + // If we're at the end of the node, go to the next one. + // Example: if you have a fenced-code-block, and you're on the start of the line + // right after the closing ```, you want to skip the fenced-code-block and + // go to the next sibling. + if cursor.node().end_byte() == offset { + cursor.goto_next_sibling(); + } + + if cursor.node().start_byte() > offset { + break; + } + + // We found the fenced code block. 
+ if cursor.node().kind() == CODE_BLOCK_NODE { + // Now we need to find the child node that contains the code. + cursor.goto_first_child(); + loop { + if cursor.node().kind() == CODE_BLOCK_CONTENT { + return Some(cursor.node().byte_range()); + } + if !cursor.goto_next_sibling() { + break; + } + } + } + } + + None +} + fn render_fold_icon_button( editor: WeakView, icon: IconName, @@ -5497,3 +5597,85 @@ fn configuration_error(cx: &AppContext) -> Option { None } + +#[cfg(test)] +mod tests { + use super::*; + use gpui::{AppContext, Context}; + use language::Buffer; + use unindent::Unindent; + + #[gpui::test] + fn test_find_code_blocks(cx: &mut AppContext) { + let markdown = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + + let buffer = cx.new_model(|cx| { + let text = r#" + line 0 + line 1 + ```rust + fn main() {} + ``` + line 5 + line 6 + line 7 + ```go + func main() {} + ``` + line 11 + ``` + this is plain text code block + ``` + + ```go + func another() {} + ``` + line 19 + "# + .unindent(); + let mut buffer = Buffer::local(text, cx); + buffer.set_language(Some(markdown.clone()), cx); + buffer + }); + let snapshot = buffer.read(cx).snapshot(); + + let code_blocks = vec![ + Point::new(3, 0)..Point::new(4, 0), + Point::new(9, 0)..Point::new(10, 0), + Point::new(13, 0)..Point::new(14, 0), + Point::new(17, 0)..Point::new(18, 0), + ] + .into_iter() + .map(|range| snapshot.point_to_offset(range.start)..snapshot.point_to_offset(range.end)) + .collect::>(); + + let expected_results = vec![ + (0, None), + (1, None), + (2, Some(code_blocks[0].clone())), + (3, Some(code_blocks[0].clone())), + (4, Some(code_blocks[0].clone())), + (5, None), + (6, None), + (7, None), + (8, Some(code_blocks[1].clone())), + (9, Some(code_blocks[1].clone())), + (10, Some(code_blocks[1].clone())), + (11, None), + (12, Some(code_blocks[2].clone())), + (13, Some(code_blocks[2].clone())), + (14, Some(code_blocks[2].clone())), + (15, None), + (16, Some(code_blocks[3].clone())), + 
(17, Some(code_blocks[3].clone())), + (18, Some(code_blocks[3].clone())), + (19, None), + ]; + + for (row, expected) in expected_results { + let offset = snapshot.point_to_offset(Point::new(row, 0)); + let range = find_surrounding_code_block(&snapshot, offset); + assert_eq!(range, expected, "unexpected result on row {:?}", row); + } + } +} From 23e1faa48524f55c5a9a2c7be084d730667abecb Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 19 Sep 2024 14:43:56 +0200 Subject: [PATCH 206/762] assistant panel: Fix copying code when trailing newline is missing (#18067) Follow-up to #17853. Apparently tree-sitter-md extends the range of the content node to include the backticks when there is no newline. Release Notes: - N/A Co-authored-by: Bennet --- crates/assistant/src/assistant_panel.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 094d187df29789..da176ebeee61e0 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -3110,6 +3110,8 @@ impl ContextEditor { context_editor_view: &View, cx: &mut ViewContext, ) -> Option<(String, bool)> { + const CODE_FENCE_DELIMITER: &'static str = "```"; + let context_editor = context_editor_view.read(cx).editor.read(cx); if context_editor.selections.newest::(cx).is_empty() { @@ -3120,10 +3122,17 @@ impl ContextEditor { let offset = snapshot.point_to_offset(head); let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; - let text = snapshot + let mut text = snapshot .text_for_range(surrounding_code_block_range) .collect::(); + // If there is no newline trailing the closing three-backticks, then + // tree-sitter-md extends the range of the content node to include + // the backticks. 
+ if text.ends_with(CODE_FENCE_DELIMITER) { + text.drain((text.len() - CODE_FENCE_DELIMITER.len())..); + } + (!text.is_empty()).then_some((text, true)) } else { let anchor = context_editor.selections.newest_anchor(); From 4338ff6be496edcdd86d5b97284f3a5ba9e140c2 Mon Sep 17 00:00:00 2001 From: Casey Watson Date: Thu, 19 Sep 2024 07:01:28 -0600 Subject: [PATCH 207/762] terminal: Add ability to open file from Git diff (#17446) - strip "a/" and "b/" prefix for potential paths. Release Notes: - Allow clicking on filepaths when using `git diff` inside the built-in terminal --- crates/terminal_view/src/terminal_view.rs | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index f19bfa70101ce3..e0b92035d107b3 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -58,6 +58,8 @@ const REGEX_SPECIAL_CHARS: &[char] = &[ const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); +const GIT_DIFF_PATH_PREFIXES: &[char] = &['a', 'b']; + ///Event to transmit the scroll from the element to the view #[derive(Clone, Debug, PartialEq)] pub struct ScrollTerminal(pub i32); @@ -826,6 +828,19 @@ fn possible_open_targets( { potential_cwd_and_workspace_paths.insert(potential_worktree_path); } + + for prefix in GIT_DIFF_PATH_PREFIXES { + let prefix_str = &prefix.to_string(); + if maybe_path.starts_with(prefix_str) { + let stripped = maybe_path.strip_prefix(prefix_str).unwrap_or(&maybe_path); + for potential_worktree_path in workspace + .worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path().join(&stripped)) + { + potential_cwd_and_workspace_paths.insert(potential_worktree_path); + } + } + } }); } From 3d5c023fdae99907cf7cf9e67f7ae20bd7bd080c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 19 Sep 2024 09:55:51 -0400 Subject: [PATCH 208/762] ci: Move collab deploys back to DigitalOcean runners (#18071) This PR moves the 
collab deployment steps in CI back to the DigitalOcean runners temporarily, so that we can deploy collab. Release Notes: - N/A --- .github/workflows/deploy_collab.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 7abd52e5a60dd3..6801be2a5494cd 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -61,7 +61,8 @@ jobs: - style - tests runs-on: - - buildjet-16vcpu-ubuntu-2204 + - self-hosted + - deploy steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH @@ -88,7 +89,8 @@ jobs: needs: - publish runs-on: - - buildjet-16vcpu-ubuntu-2204 + - self-hosted + - deploy steps: - name: Sign into Kubernetes From d91e62524f7c6437349426687ded6d1182ad7346 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Thu, 19 Sep 2024 22:41:42 +0800 Subject: [PATCH 209/762] assistant: Fix offset calculation not in char boundary (#18069) Closes #17825 Release Notes: - N/A --- crates/assistant/src/prompts.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 83e894f7978746..ae2ab4787e6f40 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -220,7 +220,8 @@ impl PromptBuilder { let before_range = 0..range.start; let truncated_before = if before_range.len() > MAX_CTX { is_truncated = true; - range.start - MAX_CTX..range.start + let start = buffer.clip_offset(range.start - MAX_CTX, text::Bias::Right); + start..range.start } else { before_range }; @@ -228,7 +229,8 @@ impl PromptBuilder { let after_range = range.end..buffer.len(); let truncated_after = if after_range.len() > MAX_CTX { is_truncated = true; - range.end..range.end + MAX_CTX + let end = buffer.clip_offset(range.end + MAX_CTX, text::Bias::Left); + range.end..end } else { after_range }; From d2894ce9c99586e9cd0588fa9b4db27bbd64f0ca Mon Sep 17 
00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 19 Sep 2024 17:00:26 +0200 Subject: [PATCH 210/762] pane: Do not autopin new item created as a neighbour of pinned tab (#18072) When I used editor::NewFile or ProjectSearch from a pinned tab, the resulting new tab would be pinned (and the last pinned tab would be pushed off). This PR fixes it by always storing new tabs outside of the pinned area if there's no destination index for the new tab. Release Notes: - Fixed tab bar not preserving pinned tab state when an editor::NewFile action is executed. --- crates/workspace/src/pane.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 09b4683c0c5552..a5f83f961f0400 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -831,13 +831,14 @@ impl Pane { } } } - // If no destination index is specified, add or move the item after the active item. + // If no destination index is specified, add or move the item after the + // active item (or at the start of tab bar, if the active item is pinned) let mut insertion_index = { cmp::min( if let Some(destination_index) = destination_index { destination_index } else { - self.active_item_index + 1 + cmp::max(self.active_item_index + 1, self.pinned_count()) }, self.items.len(), ) From a944bb2f24bea7e492ced79fe0e92a7205d6f42e Mon Sep 17 00:00:00 2001 From: Joseph T Lyons Date: Thu, 19 Sep 2024 11:02:44 -0400 Subject: [PATCH 211/762] v0.155.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a37a5350f55eab..ca5d68881ffb3f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14375,7 +14375,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.154.0" +version = "0.155.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 
645d12fc76a350..ad2e7cd48c67d8 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.154.0" +version = "0.155.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From 7d0a7541bfd1ca44a7511ec077067902b0e461ef Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 19 Sep 2024 11:45:06 -0400 Subject: [PATCH 212/762] ci: Fix collab deploys (#18077) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR fixes issues with deploying collab. We reverted 4882a75971abafa89467e779466749086d7d3f96—as the DigitalOcean runners are gone now—and moved back to BuildJet. We needed to make some changes to the deployment jobs to setup `doctl`. This PR also adds an automatic bump of the `collab-staging` tag on merges to `main`. This should help catch issues with collab deploys earlier. Release Notes: - N/A --------- Co-authored-by: Conrad --- .github/workflows/bump_collab_staging.yml | 23 +++++++++++++++++++++++ .github/workflows/deploy_collab.yml | 23 ++++++++++++++++------- 2 files changed, 39 insertions(+), 7 deletions(-) create mode 100644 .github/workflows/bump_collab_staging.yml diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml new file mode 100644 index 00000000000000..89cc7c48481d97 --- /dev/null +++ b/.github/workflows/bump_collab_staging.yml @@ -0,0 +1,23 @@ +name: Bump collab-staging Tag + +on: + push: + branches: + - main + +jobs: + update-collab-staging-tag: + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + with: + fetch-depth: 0 + + - name: Update collab-staging tag + run: | + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f collab-staging + git push 
origin collab-staging --force diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 6801be2a5494cd..c4193adcd24349 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -8,7 +8,6 @@ on: env: DOCKER_BUILDKIT: 1 - DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} jobs: style: @@ -61,11 +60,12 @@ jobs: - style - tests runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install doctl + uses: digitalocean/action-doctl@v2 + with: + token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} - name: Sign into DigitalOcean docker registry run: doctl registry login @@ -89,10 +89,19 @@ jobs: needs: - publish runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: + - name: Checkout repo + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + with: + clean: false + + - name: Install doctl + uses: digitalocean/action-doctl@v2 + with: + token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} + - name: Sign into Kubernetes run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }} From e9f2e72ff03c60f8a9a1ce9612cc51a368813cf0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 19 Sep 2024 17:51:28 +0200 Subject: [PATCH 213/762] Workspace persistence for SSH projects (#17996) TODOs: - [x] Add tests to `workspace/src/persistence.rs` - [x] Add a icon for ssh projects - [x] Fix all `TODO` comments - [x] Use `port` if it's passed in the ssh connection options In next PRs: - Make sure unsaved buffers are persisted/restored, along with other items/layout - Handle multiple paths/worktrees correctly Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner --- Cargo.lock | 1 + crates/recent_projects/src/dev_servers.rs | 7 +- crates/recent_projects/src/recent_projects.rs | 108 +++-- 
crates/recent_projects/src/ssh_connections.rs | 67 ++-- crates/remote/src/ssh_session.rs | 5 + crates/sqlez/src/bindable.rs | 16 + crates/sqlez/src/typed_statements.rs | 2 +- crates/workspace/Cargo.toml | 1 + crates/workspace/src/persistence.rs | 374 +++++++++++++++--- crates/workspace/src/persistence/model.rs | 66 +++- crates/workspace/src/workspace.rs | 80 +++- crates/zed/src/main.rs | 6 +- 12 files changed, 592 insertions(+), 141 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ca5d68881ffb3f..16ee627d2c8f08 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14096,6 +14096,7 @@ dependencies = [ "parking_lot", "postage", "project", + "remote", "schemars", "serde", "serde_json", diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index 491f378f30ce64..af5f51f14fca2c 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -39,7 +39,6 @@ use ui::{ RadioWithLabel, Tooltip, }; use ui_input::{FieldLabelLayout, TextField}; -use util::paths::PathWithPosition; use util::ResultExt; use workspace::notifications::NotifyResultExt; use workspace::OpenOptions; @@ -987,11 +986,7 @@ impl DevServerProjects { cx.spawn(|_, mut cx| async move { let result = open_ssh_project( server.into(), - project - .paths - .into_iter() - .map(|path| PathWithPosition::from_path(PathBuf::from(path))) - .collect(), + project.paths.into_iter().map(PathBuf::from).collect(), app_state, OpenOptions::default(), &mut cx, diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 182cec4614d456..cb3d3ab65950f0 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -2,6 +2,7 @@ mod dev_servers; pub mod disconnected_overlay; mod ssh_connections; mod ssh_remotes; +use remote::SshConnectionOptions; pub use ssh_connections::open_ssh_project; use client::{DevServerProjectId, ProjectId}; @@ -32,8 +33,8 @@ 
use ui::{ }; use util::{paths::PathExt, ResultExt}; use workspace::{ - AppState, CloseIntent, ModalView, SerializedWorkspaceLocation, Workspace, WorkspaceId, - WORKSPACE_DB, + AppState, CloseIntent, ModalView, OpenOptions, SerializedWorkspaceLocation, Workspace, + WorkspaceId, WORKSPACE_DB, }; #[derive(PartialEq, Clone, Deserialize, Default)] @@ -172,7 +173,7 @@ pub struct RecentProjectsDelegate { create_new_window: bool, // Flag to reset index when there is a new query vs not reset index when user delete an item reset_selected_match_index: bool, - has_any_dev_server_projects: bool, + has_any_non_local_projects: bool, } impl RecentProjectsDelegate { @@ -185,16 +186,16 @@ impl RecentProjectsDelegate { create_new_window, render_paths, reset_selected_match_index: true, - has_any_dev_server_projects: false, + has_any_non_local_projects: false, } } pub fn set_workspaces(&mut self, workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation)>) { self.workspaces = workspaces; - self.has_any_dev_server_projects = self + self.has_any_non_local_projects = !self .workspaces .iter() - .any(|(_, location)| matches!(location, SerializedWorkspaceLocation::DevServer(_))); + .all(|(_, location)| matches!(location, SerializedWorkspaceLocation::Local(_, _))); } } impl EventEmitter for RecentProjectsDelegate {} @@ -258,6 +259,23 @@ impl PickerDelegate for RecentProjectsDelegate { dev_server_project.paths.join("") ) } + SerializedWorkspaceLocation::Ssh(ssh_project) => { + format!( + "{}{}{}{}", + ssh_project.host, + ssh_project + .port + .as_ref() + .map(|port| port.to_string()) + .unwrap_or_default(), + ssh_project.path, + ssh_project + .user + .as_ref() + .map(|user| user.to_string()) + .unwrap_or_default() + ) + } }; StringMatchCandidate::new(id, combined_string) @@ -364,6 +382,33 @@ impl PickerDelegate for RecentProjectsDelegate { }; open_dev_server_project(replace_current_window, dev_server_project.id, project_id, cx) } + SerializedWorkspaceLocation::Ssh(ssh_project) => { + let 
app_state = workspace.app_state().clone(); + + let replace_window = if replace_current_window { + cx.window_handle().downcast::() + } else { + None + }; + + let open_options = OpenOptions { + replace_window, + ..Default::default() + }; + + let connection_options = SshConnectionOptions { + host: ssh_project.host.clone(), + username: ssh_project.user.clone(), + port: ssh_project.port, + password: None, + }; + + let paths = vec![PathBuf::from(ssh_project.path.clone())]; + + cx.spawn(|_, mut cx| async move { + open_ssh_project(connection_options, paths, app_state, open_options, &mut cx).await + }) + } } } }) @@ -392,7 +437,6 @@ impl PickerDelegate for RecentProjectsDelegate { let (_, location) = self.workspaces.get(hit.candidate_id)?; - let is_remote = matches!(location, SerializedWorkspaceLocation::DevServer(_)); let dev_server_status = if let SerializedWorkspaceLocation::DevServer(dev_server_project) = location { let store = dev_server_projects::Store::global(cx).read(cx); @@ -416,6 +460,9 @@ impl PickerDelegate for RecentProjectsDelegate { .filter_map(|i| paths.paths().get(*i).cloned()) .collect(), ), + SerializedWorkspaceLocation::Ssh(ssh_project) => { + Arc::new(vec![PathBuf::from(ssh_project.ssh_url())]) + } SerializedWorkspaceLocation::DevServer(dev_server_project) => { Arc::new(vec![PathBuf::from(format!( "{}:{}", @@ -457,29 +504,34 @@ impl PickerDelegate for RecentProjectsDelegate { h_flex() .flex_grow() .gap_3() - .when(self.has_any_dev_server_projects, |this| { - this.child(if is_remote { - // if disabled, Color::Disabled - let indicator_color = match dev_server_status { - Some(DevServerStatus::Online) => Color::Created, - Some(DevServerStatus::Offline) => Color::Hidden, - _ => unreachable!(), - }; - IconWithIndicator::new( - Icon::new(IconName::Server).color(Color::Muted), - Some(Indicator::dot()), - ) - .indicator_color(indicator_color) - .indicator_border_color(if selected { - Some(cx.theme().colors().element_selected) - } else { - None - }) - 
.into_any_element() - } else { - Icon::new(IconName::Screen) + .when(self.has_any_non_local_projects, |this| { + this.child(match location { + SerializedWorkspaceLocation::Local(_, _) => { + Icon::new(IconName::Screen) + .color(Color::Muted) + .into_any_element() + } + SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Screen) .color(Color::Muted) + .into_any_element(), + SerializedWorkspaceLocation::DevServer(_) => { + let indicator_color = match dev_server_status { + Some(DevServerStatus::Online) => Color::Created, + Some(DevServerStatus::Offline) => Color::Hidden, + _ => unreachable!(), + }; + IconWithIndicator::new( + Icon::new(IconName::Server).color(Color::Muted), + Some(Indicator::dot()), + ) + .indicator_color(indicator_color) + .indicator_border_color(if selected { + Some(cx.theme().colors().element_selected) + } else { + None + }) .into_any_element() + } }) }) .child({ diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 8da4284b7f56a7..ad23a5c8963b4e 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -19,7 +19,6 @@ use ui::{ h_flex, v_flex, FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label, LabelCommon, Styled, StyledExt as _, ViewContext, VisualContext, WindowContext, }; -use util::paths::PathWithPosition; use workspace::{AppState, ModalView, Workspace}; #[derive(Deserialize)] @@ -358,24 +357,29 @@ pub fn connect_over_ssh( pub async fn open_ssh_project( connection_options: SshConnectionOptions, - paths: Vec, + paths: Vec, app_state: Arc, - _open_options: workspace::OpenOptions, + open_options: workspace::OpenOptions, cx: &mut AsyncAppContext, ) -> Result<()> { let options = cx.update(|cx| (app_state.build_window_options)(None, cx))?; - let window = cx.open_window(options, |cx| { - let project = project::Project::local( - app_state.client.clone(), - app_state.node_runtime.clone(), - 
app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - None, - cx, - ); - cx.new_view(|cx| Workspace::new(None, project, app_state.clone(), cx)) - })?; + + let window = if let Some(window) = open_options.replace_window { + window + } else { + cx.open_window(options, |cx| { + let project = project::Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + cx, + ); + cx.new_view(|cx| Workspace::new(None, project, app_state.clone(), cx)) + })? + }; let result = window .update(cx, |workspace, cx| { @@ -387,40 +391,17 @@ pub async fn open_ssh_project( .read(cx) .prompt .clone(); - connect_over_ssh(connection_options, ui, cx) + connect_over_ssh(connection_options.clone(), ui, cx) })? .await; if result.is_err() { window.update(cx, |_, cx| cx.remove_window()).ok(); } - let session = result?; - let project = cx.update(|cx| { - project::Project::ssh( - session, - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - cx, - ) - })?; - - for path in paths { - project - .update(cx, |project, cx| { - project.find_or_create_worktree(&path.path, true, cx) - })? - .await?; - } - - window.update(cx, |_, cx| { - cx.replace_root_view(|cx| Workspace::new(None, project, app_state, cx)) - })?; - window.update(cx, |_, cx| cx.activate_window())?; - - Ok(()) + cx.update(|cx| { + workspace::open_ssh_project(window, connection_options, session, app_state, paths, cx) + })? 
+ .await } diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 7556b38f3ed0f4..4aab731e645934 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -33,6 +33,11 @@ use std::{ }; use tempfile::TempDir; +#[derive( + Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, +)] +pub struct SshProjectId(pub u64); + #[derive(Clone)] pub struct SshSocket { connection_options: SshConnectionOptions, diff --git a/crates/sqlez/src/bindable.rs b/crates/sqlez/src/bindable.rs index e8b9679936d742..8cf4329f929891 100644 --- a/crates/sqlez/src/bindable.rs +++ b/crates/sqlez/src/bindable.rs @@ -196,6 +196,22 @@ impl Column for u32 { } } +impl StaticColumnCount for u16 {} +impl Bind for u16 { + fn bind(&self, statement: &Statement, start_index: i32) -> Result { + (*self as i64) + .bind(statement, start_index) + .with_context(|| format!("Failed to bind usize at index {start_index}")) + } +} + +impl Column for u16 { + fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { + let result = statement.column_int64(start_index)?; + Ok((result as u16, start_index + 1)) + } +} + impl StaticColumnCount for usize {} impl Bind for usize { fn bind(&self, statement: &Statement, start_index: i32) -> Result { diff --git a/crates/sqlez/src/typed_statements.rs b/crates/sqlez/src/typed_statements.rs index d7f25cde5174b6..95f4f829ec8c16 100644 --- a/crates/sqlez/src/typed_statements.rs +++ b/crates/sqlez/src/typed_statements.rs @@ -74,7 +74,7 @@ impl Connection { } /// Prepare a statement which takes a binding and selects a single row - /// from the database. WIll return none if no rows are returned and will + /// from the database. Will return none if no rows are returned and will /// error if more than 1 row is returned. 
/// /// Note: If there are multiple statements that depend upon each other diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 7f5c1ccce8a576..1b998eeabe5373 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -51,6 +51,7 @@ postage.workspace = true project.workspace = true dev_server_projects.workspace = true task.workspace = true +remote.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 88ede4228d622d..034328a30b2ffa 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -7,6 +7,7 @@ use client::DevServerProjectId; use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql}; use gpui::{point, size, Axis, Bounds, WindowBounds, WindowId}; +use remote::ssh_session::SshProjectId; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, @@ -20,7 +21,7 @@ use crate::WorkspaceId; use model::{ GroupId, LocalPaths, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup, - SerializedWorkspace, + SerializedSshProject, SerializedWorkspace, }; use self::model::{ @@ -354,7 +355,17 @@ define_connection! 
{ ), sql!( ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0; - ) + ), + sql!( + CREATE TABLE ssh_projects ( + id INTEGER PRIMARY KEY, + host TEXT NOT NULL, + port INTEGER, + path TEXT NOT NULL, + user TEXT + ); + ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE; + ), ]; } @@ -374,7 +385,6 @@ impl WorkspaceDb { workspace_id, local_paths, local_paths_order, - dev_server_project_id, window_bounds, display, centered_layout, @@ -384,7 +394,6 @@ impl WorkspaceDb { WorkspaceId, Option, Option, - Option, Option, Option, Option, @@ -396,7 +405,6 @@ impl WorkspaceDb { workspace_id, local_paths, local_paths_order, - dev_server_project_id, window_state, window_x, window_y, @@ -422,28 +430,13 @@ impl WorkspaceDb { .warn_on_err() .flatten()?; - let location = if let Some(dev_server_project_id) = dev_server_project_id { - let dev_server_project: SerializedDevServerProject = self - .select_row_bound(sql! { - SELECT id, path, dev_server_name - FROM dev_server_projects - WHERE id = ? 
- }) - .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) - .context("No remote project found") - .warn_on_err() - .flatten()?; - SerializedWorkspaceLocation::DevServer(dev_server_project) - } else if let Some(local_paths) = local_paths { - match local_paths_order { - Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), - None => { - let order = LocalPathsOrder::default_for_paths(&local_paths); - SerializedWorkspaceLocation::Local(local_paths, order) - } + let local_paths = local_paths?; + let location = match local_paths_order { + Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), + None => { + let order = LocalPathsOrder::default_for_paths(&local_paths); + SerializedWorkspaceLocation::Local(local_paths, order) } - } else { - return None; }; Some(SerializedWorkspace { @@ -470,8 +463,6 @@ impl WorkspaceDb { // and we've grabbed the most recent workspace let ( workspace_id, - local_paths, - local_paths_order, dev_server_project_id, window_bounds, display, @@ -480,8 +471,6 @@ impl WorkspaceDb { window_id, ): ( WorkspaceId, - Option, - Option, Option, Option, Option, @@ -492,8 +481,6 @@ impl WorkspaceDb { .select_row_bound(sql! { SELECT workspace_id, - local_paths, - local_paths_order, dev_server_project_id, window_state, window_x, @@ -520,29 +507,20 @@ impl WorkspaceDb { .warn_on_err() .flatten()?; - let location = if let Some(dev_server_project_id) = dev_server_project_id { - let dev_server_project: SerializedDevServerProject = self - .select_row_bound(sql! { - SELECT id, path, dev_server_name - FROM dev_server_projects - WHERE id = ? 
- }) - .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) - .context("No remote project found") - .warn_on_err() - .flatten()?; - SerializedWorkspaceLocation::DevServer(dev_server_project) - } else if let Some(local_paths) = local_paths { - match local_paths_order { - Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), - None => { - let order = LocalPathsOrder::default_for_paths(&local_paths); - SerializedWorkspaceLocation::Local(local_paths, order) - } - } - } else { - return None; - }; + let dev_server_project_id = dev_server_project_id?; + + let dev_server_project: SerializedDevServerProject = self + .select_row_bound(sql! { + SELECT id, path, dev_server_name + FROM dev_server_projects + WHERE id = ? + }) + .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) + .context("No remote project found") + .warn_on_err() + .flatten()?; + + let location = SerializedWorkspaceLocation::DevServer(dev_server_project); Some(SerializedWorkspace { id: workspace_id, @@ -560,6 +538,62 @@ impl WorkspaceDb { }) } + pub(crate) fn workspace_for_ssh_project( + &self, + ssh_project: &SerializedSshProject, + ) -> Option { + let (workspace_id, window_bounds, display, centered_layout, docks, window_id): ( + WorkspaceId, + Option, + Option, + Option, + DockStructure, + Option, + ) = self + .select_row_bound(sql! { + SELECT + workspace_id, + window_state, + window_x, + window_y, + window_width, + window_height, + display, + centered_layout, + left_dock_visible, + left_dock_active_panel, + left_dock_zoom, + right_dock_visible, + right_dock_active_panel, + right_dock_zoom, + bottom_dock_visible, + bottom_dock_active_panel, + bottom_dock_zoom, + window_id + FROM workspaces + WHERE ssh_project_id = ? 
+ }) + .and_then(|mut prepared_statement| (prepared_statement)(ssh_project.id.0)) + .context("No workspaces found") + .warn_on_err() + .flatten()?; + + Some(SerializedWorkspace { + id: workspace_id, + location: SerializedWorkspaceLocation::Ssh(ssh_project.clone()), + center_group: self + .get_center_pane_group(workspace_id) + .context("Getting center group") + .log_err()?, + window_bounds, + centered_layout: centered_layout.unwrap_or(false), + display, + docks, + session_id: None, + window_id, + }) + } + /// Saves a workspace using the worktree roots. Will garbage collect any workspaces /// that used this workspace previously pub(crate) async fn save_workspace(&self, workspace: SerializedWorkspace) { @@ -674,6 +708,49 @@ impl WorkspaceDb { workspace.docks, )) .context("Updating workspace")?; + }, + SerializedWorkspaceLocation::Ssh(ssh_project) => { + conn.exec_bound(sql!( + DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ? + ))?((ssh_project.id.0, workspace.id)) + .context("clearing out old locations")?; + + // Upsert + conn.exec_bound(sql!( + INSERT INTO workspaces( + workspace_id, + ssh_project_id, + left_dock_visible, + left_dock_active_panel, + left_dock_zoom, + right_dock_visible, + right_dock_active_panel, + right_dock_zoom, + bottom_dock_visible, + bottom_dock_active_panel, + bottom_dock_zoom, + timestamp + ) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP) + ON CONFLICT DO + UPDATE SET + ssh_project_id = ?2, + left_dock_visible = ?3, + left_dock_active_panel = ?4, + left_dock_zoom = ?5, + right_dock_visible = ?6, + right_dock_active_panel = ?7, + right_dock_zoom = ?8, + bottom_dock_visible = ?9, + bottom_dock_active_panel = ?10, + bottom_dock_zoom = ?11, + timestamp = CURRENT_TIMESTAMP + ))?(( + workspace.id, + ssh_project.id.0, + workspace.docks, + )) + .context("Updating workspace")?; } } @@ -688,6 +765,46 @@ impl WorkspaceDb { .await; } + pub(crate) async fn get_or_create_ssh_project( + &self, + host: String, 
+ port: Option, + path: String, + user: Option, + ) -> Result { + if let Some(project) = self + .get_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await? + { + Ok(project) + } else { + self.insert_ssh_project(host, port, path, user) + .await? + .ok_or_else(|| anyhow!("failed to insert ssh project")) + } + } + + query! { + async fn get_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { + SELECT id, host, port, path, user + FROM ssh_projects + WHERE host IS ? AND port IS ? AND path IS ? AND user IS ? + LIMIT 1 + } + } + + query! { + async fn insert_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { + INSERT INTO ssh_projects( + host, + port, + path, + user + ) VALUES (?1, ?2, ?3, ?4) + RETURNING id, host, port, path, user + } + } + query! { pub async fn next_id() -> Result { INSERT INTO workspaces DEFAULT VALUES RETURNING workspace_id @@ -695,10 +812,12 @@ impl WorkspaceDb { } query! { - fn recent_workspaces() -> Result)>> { - SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id + fn recent_workspaces() -> Result, Option)>> { + SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id, ssh_project_id FROM workspaces - WHERE local_paths IS NOT NULL OR dev_server_project_id IS NOT NULL + WHERE local_paths IS NOT NULL + OR dev_server_project_id IS NOT NULL + OR ssh_project_id IS NOT NULL ORDER BY timestamp DESC } } @@ -719,6 +838,13 @@ impl WorkspaceDb { } } + query! { + fn ssh_projects() -> Result> { + SELECT id, host, port, path, user + FROM ssh_projects + } + } + pub(crate) fn last_window( &self, ) -> anyhow::Result<(Option, Option)> { @@ -768,8 +894,11 @@ impl WorkspaceDb { let mut result = Vec::new(); let mut delete_tasks = Vec::new(); let dev_server_projects = self.dev_server_projects()?; + let ssh_projects = self.ssh_projects()?; - for (id, location, order, dev_server_project_id) in self.recent_workspaces()? 
{ + for (id, location, order, dev_server_project_id, ssh_project_id) in + self.recent_workspaces()? + { if let Some(dev_server_project_id) = dev_server_project_id.map(DevServerProjectId) { if let Some(dev_server_project) = dev_server_projects .iter() @@ -782,6 +911,15 @@ impl WorkspaceDb { continue; } + if let Some(ssh_project_id) = ssh_project_id.map(SshProjectId) { + if let Some(ssh_project) = ssh_projects.iter().find(|rp| rp.id == ssh_project_id) { + result.push((id, SerializedWorkspaceLocation::Ssh(ssh_project.clone()))); + } else { + delete_tasks.push(self.delete_workspace_by_id(id)); + } + continue; + } + if location.paths().iter().all(|path| path.exists()) && location.paths().iter().any(|path| path.is_dir()) { @@ -802,7 +940,9 @@ impl WorkspaceDb { .into_iter() .filter_map(|(_, location)| match location { SerializedWorkspaceLocation::Local(local_paths, _) => Some(local_paths), + // Do not automatically reopen Dev Server and SSH workspaces SerializedWorkspaceLocation::DevServer(_) => None, + SerializedWorkspaceLocation::Ssh(_) => None, }) .next()) } @@ -1512,6 +1652,122 @@ mod tests { assert_eq!(have[3], LocalPaths::new([dir1.path().to_str().unwrap()])); } + #[gpui::test] + async fn test_get_or_create_ssh_project() { + let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project").await); + + let (host, port, path, user) = ( + "example.com".to_string(), + Some(22_u16), + "/home/user".to_string(), + Some("user".to_string()), + ); + + let project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(project.host, host); + assert_eq!(project.path, path); + assert_eq!(project.user, user); + + // Test that calling the function again with the same parameters returns the same project + let same_project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(project.id, same_project.id); + + // Test with different parameters + let 
(host2, path2, user2) = ( + "otherexample.com".to_string(), + "/home/otheruser".to_string(), + Some("otheruser".to_string()), + ); + + let different_project = db + .get_or_create_ssh_project(host2.clone(), None, path2.clone(), user2.clone()) + .await + .unwrap(); + + assert_ne!(project.id, different_project.id); + assert_eq!(different_project.host, host2); + assert_eq!(different_project.path, path2); + assert_eq!(different_project.user, user2); + } + + #[gpui::test] + async fn test_get_or_create_ssh_project_with_null_user() { + let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project_with_null_user").await); + + let (host, port, path, user) = ( + "example.com".to_string(), + None, + "/home/user".to_string(), + None, + ); + + let project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), None) + .await + .unwrap(); + + assert_eq!(project.host, host); + assert_eq!(project.path, path); + assert_eq!(project.user, None); + + // Test that calling the function again with the same parameters returns the same project + let same_project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(project.id, same_project.id); + } + + #[gpui::test] + async fn test_get_ssh_projects() { + let db = WorkspaceDb(open_test_db("test_get_ssh_projects").await); + + let projects = vec![ + ( + "example.com".to_string(), + None, + "/home/user".to_string(), + None, + ), + ( + "anotherexample.com".to_string(), + Some(123_u16), + "/home/user2".to_string(), + Some("user2".to_string()), + ), + ( + "yetanother.com".to_string(), + Some(345_u16), + "/home/user3".to_string(), + None, + ), + ]; + + for (host, port, path, user) in projects.iter() { + let project = db + .get_or_create_ssh_project(host.clone(), *port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(&project.host, host); + assert_eq!(&project.port, port); + assert_eq!(&project.path, path); + assert_eq!(&project.user, user); + } + + let 
stored_projects = db.ssh_projects().unwrap(); + assert_eq!(stored_projects.len(), projects.len()); + } + #[gpui::test] async fn test_simple_split() { env_logger::try_init().ok(); diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index d6f8001f25fc3e..0ad3fa5e606e5b 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -11,6 +11,7 @@ use db::sqlez::{ }; use gpui::{AsyncWindowContext, Model, View, WeakView}; use project::Project; +use remote::ssh_session::SshProjectId; use serde::{Deserialize, Serialize}; use std::{ path::{Path, PathBuf}, @@ -20,6 +21,69 @@ use ui::SharedString; use util::ResultExt; use uuid::Uuid; +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct SerializedSshProject { + pub id: SshProjectId, + pub host: String, + pub port: Option, + pub path: String, + pub user: Option, +} + +impl SerializedSshProject { + pub fn ssh_url(&self) -> String { + let mut result = String::from("ssh://"); + if let Some(user) = &self.user { + result.push_str(user); + result.push('@'); + } + result.push_str(&self.host); + if let Some(port) = &self.port { + result.push(':'); + result.push_str(&port.to_string()); + } + result.push_str(&self.path); + result + } +} + +impl StaticColumnCount for SerializedSshProject { + fn column_count() -> usize { + 5 + } +} + +impl Bind for &SerializedSshProject { + fn bind(&self, statement: &Statement, start_index: i32) -> Result { + let next_index = statement.bind(&self.id.0, start_index)?; + let next_index = statement.bind(&self.host, next_index)?; + let next_index = statement.bind(&self.port, next_index)?; + let next_index = statement.bind(&self.path, next_index)?; + statement.bind(&self.user, next_index) + } +} + +impl Column for SerializedSshProject { + fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { + let id = statement.column_int64(start_index)?; + let host = 
statement.column_text(start_index + 1)?.to_string(); + let (port, _) = Option::::column(statement, start_index + 2)?; + let path = statement.column_text(start_index + 3)?.to_string(); + let (user, _) = Option::::column(statement, start_index + 4)?; + + Ok(( + Self { + id: SshProjectId(id as u64), + host, + port, + path, + user, + }, + start_index + 5, + )) + } +} + #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] pub struct SerializedDevServerProject { pub id: DevServerProjectId, @@ -58,7 +122,6 @@ impl Column for LocalPaths { fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { let path_blob = statement.column_blob(start_index)?; let paths: Arc> = if path_blob.is_empty() { - println!("path blog is empty"); Default::default() } else { bincode::deserialize(path_blob).context("Bincode deserialization of paths failed")? @@ -146,6 +209,7 @@ impl Column for SerializedDevServerProject { #[derive(Debug, PartialEq, Clone)] pub enum SerializedWorkspaceLocation { Local(LocalPaths, LocalPathsOrder), + Ssh(SerializedSshProject), DevServer(SerializedDevServerProject), } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 98ac49992d0a4f..5855dcce1e5919 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -49,15 +49,19 @@ use node_runtime::NodeRuntime; use notifications::{simple_message_notification::MessageNotification, NotificationHandle}; pub use pane::*; pub use pane_group::*; -use persistence::{model::SerializedWorkspace, SerializedWindowBounds, DB}; pub use persistence::{ model::{ItemId, LocalPaths, SerializedDevServerProject, SerializedWorkspaceLocation}, WorkspaceDb, DB as WORKSPACE_DB, }; +use persistence::{ + model::{SerializedSshProject, SerializedWorkspace}, + SerializedWindowBounds, DB, +}; use postage::stream::Stream; use project::{ DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, }; +use 
remote::{SshConnectionOptions, SshSession}; use serde::Deserialize; use session::AppSession; use settings::Settings; @@ -756,6 +760,7 @@ pub struct Workspace { render_disconnected_overlay: Option) -> AnyElement>>, serializable_items_tx: UnboundedSender>, + serialized_ssh_project: Option, _items_serializer: Task>, session_id: Option, } @@ -1054,6 +1059,7 @@ impl Workspace { serializable_items_tx, _items_serializer, session_id: Some(session_id), + serialized_ssh_project: None, } } @@ -1440,6 +1446,10 @@ impl Workspace { self.on_prompt_for_open_path = Some(prompt) } + pub fn set_serialized_ssh_project(&mut self, serialized_ssh_project: SerializedSshProject) { + self.serialized_ssh_project = Some(serialized_ssh_project); + } + pub fn set_render_disconnected_overlay( &mut self, render: impl Fn(&mut Self, &mut ViewContext) -> AnyElement + 'static, @@ -4097,7 +4107,9 @@ impl Workspace { } } - let location = if let Some(local_paths) = self.local_paths(cx) { + let location = if let Some(ssh_project) = &self.serialized_ssh_project { + Some(SerializedWorkspaceLocation::Ssh(ssh_project.clone())) + } else if let Some(local_paths) = self.local_paths(cx) { if !local_paths.is_empty() { Some(SerializedWorkspaceLocation::from_local_paths(local_paths)) } else { @@ -5476,6 +5488,70 @@ pub fn join_hosted_project( }) } +pub fn open_ssh_project( + window: WindowHandle, + connection_options: SshConnectionOptions, + session: Arc, + app_state: Arc, + paths: Vec, + cx: &mut AppContext, +) -> Task> { + cx.spawn(|mut cx| async move { + // TODO: Handle multiple paths + let path = paths.iter().next().cloned().unwrap_or_default(); + + let serialized_ssh_project = persistence::DB + .get_or_create_ssh_project( + connection_options.host.clone(), + connection_options.port, + path.to_string_lossy().to_string(), + connection_options.username.clone(), + ) + .await?; + + let project = cx.update(|cx| { + project::Project::ssh( + session, + app_state.client.clone(), + app_state.node_runtime.clone(), + 
app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + cx, + ) + })?; + + for path in paths { + project + .update(&mut cx, |project, cx| { + project.find_or_create_worktree(&path, true, cx) + })? + .await?; + } + + let serialized_workspace = + persistence::DB.workspace_for_ssh_project(&serialized_ssh_project); + + let workspace_id = + if let Some(workspace_id) = serialized_workspace.map(|workspace| workspace.id) { + workspace_id + } else { + persistence::DB.next_id().await? + }; + + cx.update_window(window.into(), |_, cx| { + cx.replace_root_view(|cx| { + let mut workspace = + Workspace::new(Some(workspace_id), project, app_state.clone(), cx); + workspace.set_serialized_ssh_project(serialized_ssh_project); + workspace + }); + })?; + + window.update(&mut cx, |_, cx| cx.activate_window()) + }) +} + pub fn join_dev_server_project( dev_server_project_id: DevServerProjectId, project_id: ProjectId, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index c127a975a95e89..3104001f992726 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -667,7 +667,11 @@ fn handle_open_request( cx.spawn(|mut cx| async move { open_ssh_project( connection_info, - request.open_paths, + request + .open_paths + .into_iter() + .map(|path| path.path) + .collect::>(), app_state, workspace::OpenOptions::default(), &mut cx, From 3fd690ade401f7d665448977c674db4780e23165 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 19 Sep 2024 12:00:13 -0400 Subject: [PATCH 214/762] docs: Update lsp.settings examples for yaml-language-server (#18081) --- docs/src/configuring-zed.md | 21 ++++++++- docs/src/languages/typescript.md | 26 +++++------ docs/src/languages/yaml.md | 80 ++++++++++++++++++++++++++++++-- 3 files changed, 108 insertions(+), 19 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 382c33c216adf9..1befa7d93abb7e 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -575,8 
+575,13 @@ Each option controls displaying of a particular toolbar element. If all elements The following settings can be overridden for specific language servers: - `initialization_options` +- `settings` -To override settings for a language, add an entry for that language server's name to the `lsp` value. Example: +To override configuration for a language server, add an entry for that language server's name to the `lsp` value. + +Some options are passed via `initialization_options` to the language server. These are for options which must be specified at language server startup and when changed will require restarting the language server. + +For example to pass the `check` option to `rust-analyzer`, use the following configuration: ```json "lsp": { @@ -590,6 +595,20 @@ To override settings for a language, add an entry for that language server's nam } ``` +While other options may be changed at a runtime and should be placed under `settings`: + +```json +"lsp": { + "yaml-language-server": { + "settings": { + "yaml": { + "keyOrdering": true // Enforces alphabetical ordering of keys in maps + } + } + } +} +``` + ## Format On Save - Description: Whether or not to perform a buffer format before saving. 
diff --git a/docs/src/languages/typescript.md b/docs/src/languages/typescript.md index feb7d76622f121..080d41efb33c28 100644 --- a/docs/src/languages/typescript.md +++ b/docs/src/languages/typescript.md @@ -72,20 +72,20 @@ You can override these settings in your configuration file: ```json "lsp": { - "$LANGUAGE_SERVER_NAME": { - "initialization_options": { - "preferences": { - "includeInlayParameterNameHints": "all", - "includeInlayParameterNameHintsWhenArgumentMatchesName": true, - "includeInlayFunctionParameterTypeHints": true, - "includeInlayVariableTypeHints": true, - "includeInlayVariableTypeHintsWhenTypeMatchesName": true, - "includeInlayPropertyDeclarationTypeHints": true, - "includeInlayFunctionLikeReturnTypeHints": true, - "includeInlayEnumMemberValueHints": true, - } - } + "$LANGUAGE_SERVER_NAME": { + "initialization_options": { + "preferences": { + "includeInlayParameterNameHints": "all", + "includeInlayParameterNameHintsWhenArgumentMatchesName": true, + "includeInlayFunctionParameterTypeHints": true, + "includeInlayVariableTypeHints": true, + "includeInlayVariableTypeHintsWhenTypeMatchesName": true, + "includeInlayPropertyDeclarationTypeHints": true, + "includeInlayFunctionLikeReturnTypeHints": true, + "includeInlayEnumMemberValueHints": true, + } } + } } ``` diff --git a/docs/src/languages/yaml.md b/docs/src/languages/yaml.md index 5ef614394cac9d..7b840d08252ca4 100644 --- a/docs/src/languages/yaml.md +++ b/docs/src/languages/yaml.md @@ -12,7 +12,7 @@ You can configure various [yaml-language-server settings](https://github.com/red ```json "lsp": { "yaml-language-server": { - "initialization_options": { + "settings": { "yaml": { "keyOrdering": true, "format": { @@ -32,9 +32,9 @@ Note, settings keys must be nested, so `yaml.keyOrdering` becomes `{"yaml": { "k ## Schemas -By default yaml-language-server will attempt to determine the correct schema for a given yaml file and retrieve the appropriate JSON Schema from [Json Schema Store]. 
+By default yaml-language-server will attempt to determine the correct schema for a given yaml file and retrieve the appropriate JSON Schema from [Json Schema Store](https://schemastore.org/). -You can override this by [using an inlined schema] reference via a modeline comment at the top of your yaml file: +You can override any auto-detected schema via the `schemas` settings key (demonstrated above) or by providing an [inlined schema](https://github.com/redhat-developer/yaml-language-server#using-inlined-schema) reference via a modeline comment at the top of your yaml file: ```yaml # yaml-language-server: $schema=https://json.schemastore.org/github-action.json @@ -44,12 +44,12 @@ on: types: [oppened] ``` -You can disable this functionality entirely if desired: +You can disable the automatic detection and retrieval of schemas from the JSON Schema if desired: ```json "lsp": { "yaml-language-server": { - "initialization_options": { + "settings": { "yaml": { "schemaStore": { "enable": false @@ -59,3 +59,73 @@ You can disable this functionality entirely if desired: } } ``` + +## Custom Tags + +Yaml-language-server supports [custom tags](https://github.com/redhat-developer/yaml-language-server#adding-custom-tags) which can be used to inject custom application functionality at runtime into your yaml files. 
+ +For example Amazon CloudFormation YAML uses a number of custom tags, to support these you can add the following to your settings.json: + +```json + "lsp": { + "yaml-language-server": { + "settings": { + "yaml": { + "customTags": [ + "!And scalar", + "!And mapping", + "!And sequence", + "!If scalar", + "!If mapping", + "!If sequence", + "!Not scalar", + "!Not mapping", + "!Not sequence", + "!Equals scalar", + "!Equals mapping", + "!Equals sequence", + "!Or scalar", + "!Or mapping", + "!Or sequence", + "!FindInMap scalar", + "!FindInMap mapping", + "!FindInMap sequence", + "!Base64 scalar", + "!Base64 mapping", + "!Base64 sequence", + "!Cidr scalar", + "!Cidr mapping", + "!Cidr sequence", + "!Ref scalar", + "!Ref mapping", + "!Ref sequence", + "!Sub scalar", + "!Sub mapping", + "!Sub sequence", + "!GetAtt scalar", + "!GetAtt mapping", + "!GetAtt sequence", + "!GetAZs scalar", + "!GetAZs mapping", + "!GetAZs sequence", + "!ImportValue scalar", + "!ImportValue mapping", + "!ImportValue sequence", + "!Select scalar", + "!Select mapping", + "!Select sequence", + "!Split scalar", + "!Split mapping", + "!Split sequence", + "!Join scalar", + "!Join mapping", + "!Join sequence", + "!Condition scalar", + "!Condition mapping", + "!Condition sequence" + ] + } + } + } + } +``` From 713b39bac0702b008c461c87764a603010562b65 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 19 Sep 2024 10:13:55 -0600 Subject: [PATCH 215/762] Auto deploy collab staging daily (#18085) This should avoid us breaking the collab build and not noticing for a month Release Notes: - N/A --- .github/workflows/bump_collab_staging.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml index 89cc7c48481d97..224325d53ffc2e 100644 --- a/.github/workflows/bump_collab_staging.yml +++ b/.github/workflows/bump_collab_staging.yml @@ -1,9 +1,9 @@ name: Bump collab-staging Tag on: - push: - branches: - - 
main + schedule: + # Fire every day at 16:00 UTC (At the start of the US workday) + - cron: "0 16 * * *" jobs: update-collab-staging-tag: From 3986bcf9dc23cd32b5155310136f53d9d8a5cc73 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:14:37 -0600 Subject: [PATCH 216/762] Update Rust crate async-trait to v0.1.82 (#18038) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [async-trait](https://redirect.github.com/dtolnay/async-trait) | workspace.dependencies | patch | `0.1.81` -> `0.1.82` | --- ### Release Notes
dtolnay/async-trait (async-trait) ### [`v0.1.82`](https://redirect.github.com/dtolnay/async-trait/releases/tag/0.1.82) [Compare Source](https://redirect.github.com/dtolnay/async-trait/compare/0.1.81...0.1.82) - Prevent elided_named_lifetimes lint being produced in generated code ([#​276](https://redirect.github.com/dtolnay/async-trait/issues/276))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 16ee627d2c8f08..38c3e74ce15409 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -894,9 +894,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.81" +version = "0.1.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" +checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" dependencies = [ "proc-macro2", "quote", From 6670c9eb3b5033caf4b78d59aecab9bee5cb09d1 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:15:31 -0600 Subject: [PATCH 217/762] Update Rust crate backtrace to v0.3.74 (#18039) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [backtrace](https://redirect.github.com/rust-lang/backtrace-rs) | dependencies | patch | `0.3.73` -> `0.3.74` | | [backtrace](https://redirect.github.com/rust-lang/backtrace-rs) | dev-dependencies | patch | `0.3.73` -> `0.3.74` | --- ### Release Notes
rust-lang/backtrace-rs (backtrace) ### [`v0.3.74`](https://redirect.github.com/rust-lang/backtrace-rs/releases/tag/0.3.74) [Compare Source](https://redirect.github.com/rust-lang/backtrace-rs/compare/0.3.73...0.3.74) #### What's Changed - QNX Neutrino 7.0 support, thanks to [@​nyurik](https://redirect.github.com/nyurik) in [https://github.com/rust-lang/backtrace-rs/pull/648](https://redirect.github.com/rust-lang/backtrace-rs/pull/648) - Cleaned up our Android support. This should massively improve backtraces for ones with the API level sufficient to ship with libunwind, etc. Unfortunately, it comes at the cost of dropping support for older ones! Thanks to [@​fengys](https://redirect.github.com/fengys) in [https://github.com/rust-lang/backtrace-rs/pull/656](https://redirect.github.com/rust-lang/backtrace-rs/pull/656) - Made PrintFmt, which was using the `Enum::__NonExhaustiveVariant` pattern, use `#[non_exhaustive]` for real. Don't @​ me if you were matching on that! Thanks to [@​nyurik](https://redirect.github.com/nyurik) in [https://github.com/rust-lang/backtrace-rs/pull/651](https://redirect.github.com/rust-lang/backtrace-rs/pull/651) - Massively cleaned up the windows code! 
We moved from winapi to windows-sys with windows-targets thanks to [@​CraftSpider](https://redirect.github.com/CraftSpider) and [@​ChrisDenton](https://redirect.github.com/ChrisDenton) in - Don't cast HANDLE to usize and back by [@​CraftSpider](https://redirect.github.com/CraftSpider) in [https://github.com/rust-lang/backtrace-rs/pull/635](https://redirect.github.com/rust-lang/backtrace-rs/pull/635) - Switch from `winapi` to `windows-sys` by [@​CraftSpider](https://redirect.github.com/CraftSpider) in [https://github.com/rust-lang/backtrace-rs/pull/641](https://redirect.github.com/rust-lang/backtrace-rs/pull/641) - Update windows bindings and use windows-targets by [@​ChrisDenton](https://redirect.github.com/ChrisDenton) in [https://github.com/rust-lang/backtrace-rs/pull/653](https://redirect.github.com/rust-lang/backtrace-rs/pull/653) - A bunch of updated dependencies. Thanks [@​djc](https://redirect.github.com/djc) and [@​khuey](https://redirect.github.com/khuey)! - Sorry if you were testing this code in miri! It started yelling about sussy casts. A lot. We did a bunch of internal cleanups that should make it quiet down, thanks to [@​workingjubilee](https://redirect.github.com/workingjubilee) in [https://github.com/rust-lang/backtrace-rs/pull/641](https://redirect.github.com/rust-lang/backtrace-rs/pull/641) - Uhhh we had to tweak `dl_iterate_phdr` in [https://github.com/rust-lang/backtrace-rs/pull/660](https://redirect.github.com/rust-lang/backtrace-rs/pull/660) after Android revealed it was... kind of unsound actually and not doing things like checking for null pointers before making slices! WHOOPS! Thanks to [@​saethlin](https://redirect.github.com/saethlin) for implementing detection for precisely that in rustc! It's really hard to find soundness issues in inherited codebases like this one... 
#### New Contributors - [@​CraftSpider](https://redirect.github.com/CraftSpider) made their first contribution in [https://github.com/rust-lang/backtrace-rs/pull/635](https://redirect.github.com/rust-lang/backtrace-rs/pull/635) - [@​fengys1996](https://redirect.github.com/fengys1996) made their first contribution in [https://github.com/rust-lang/backtrace-rs/pull/656](https://redirect.github.com/rust-lang/backtrace-rs/pull/656) - [@​djc](https://redirect.github.com/djc) made their first contribution in [https://github.com/rust-lang/backtrace-rs/pull/657](https://redirect.github.com/rust-lang/backtrace-rs/pull/657) **Full Changelog**: https://github.com/rust-lang/backtrace-rs/compare/0.3.73...0.3.74
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about these updates again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 30 ++++++++++++++++++------------ 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 38c3e74ce15409..4a1a58446977fe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21,11 +21,11 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" dependencies = [ - "gimli", + "gimli 0.31.0", ] [[package]] @@ -1493,17 +1493,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.7.4", + "miniz_oxide 0.8.0", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -3083,7 +3083,7 @@ dependencies = [ "cranelift-control", "cranelift-entity", "cranelift-isle", - "gimli", + "gimli 0.29.0", "hashbrown 0.14.5", "log", "regalloc2", @@ -4873,6 +4873,12 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "gimli" +version = "0.31.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" + [[package]] name = "git" version = "0.1.0" @@ -13108,7 +13114,7 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "cranelift-wasm", - "gimli", + "gimli 0.29.0", "log", "object", "target-lexicon", @@ -13128,7 +13134,7 @@ dependencies = [ "cpp_demangle", "cranelift-bitset", "cranelift-entity", - "gimli", + "gimli 0.29.0", "indexmap 2.4.0", "log", "object", @@ -13242,7 +13248,7 @@ checksum = "2a25199625effa4c13dd790d64bd56884b014c69829431bfe43991c740bd5bc1" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", + "gimli 0.29.0", "object", "target-lexicon", "wasmparser 0.215.0", @@ -13539,7 +13545,7 @@ checksum = "073efe897d9ead7fc609874f94580afc831114af5149b6a90ee0a3a39b497fe0" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", + "gimli 0.29.0", "regalloc2", "smallvec", "target-lexicon", From 157c57aa8d3e35a5d7d750ae552740b412b5911b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:15:46 -0600 Subject: [PATCH 218/762] Update Rust crate clap to v4.5.17 (#18041) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.16` -> `4.5.17` | --- ### Release Notes
clap-rs/clap (clap) ### [`v4.5.17`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4517---2024-09-04) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.16...v4.5.17) ##### Fixes - *(help)* Style required argument groups - *(derive)* Improve error messages when unsupported fields are used
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4a1a58446977fe..68625d5520a047 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2282,9 +2282,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.16" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" +checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" dependencies = [ "clap_builder", "clap_derive", @@ -2292,9 +2292,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.15" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" dependencies = [ "anstream", "anstyle", From ce4f07bd3cbfa20a95e14af112e83002bfd486d4 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:16:31 -0600 Subject: [PATCH 219/762] Update Rust crate globset to v0.4.15 (#18042) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | 
[globset](https://redirect.github.com/BurntSushi/ripgrep/tree/master/crates/globset) ([source](https://redirect.github.com/BurntSushi/ripgrep/tree/HEAD/crates/globset)) | workspace.dependencies | patch | `0.4.14` -> `0.4.15` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68625d5520a047..7c298c2a9ad31c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4946,9 +4946,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" dependencies = [ "aho-corasick", "bstr", From c3bdc1c178190dd223d6b4718905f86822329da3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:18:14 -0600 Subject: [PATCH 220/762] Update Rust crate ignore to v0.4.23 (#18044) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [ignore](https://redirect.github.com/BurntSushi/ripgrep/tree/master/crates/ignore) 
([source](https://redirect.github.com/BurntSushi/ripgrep/tree/HEAD/crates/ignore)) | workspace.dependencies | patch | `0.4.22` -> `0.4.23` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7c298c2a9ad31c..26b8847041ba60 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5688,9 +5688,9 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" +checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" dependencies = [ "crossbeam-deque", "globset", @@ -6474,7 +6474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -13528,7 +13528,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] From ac0d5d3152fe09201b907210c917e82fee62d450 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Fri, 20 Sep 2024 00:19:13 +0800 Subject: [PATCH 221/762] windows: Fix 
regional indicator symbols broken (#18053) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #18027 Unlike macOS, not all glyphs in color fonts are color glyphs, such as `🇩🇪` in `Segoe UI Emoji`. As a result, attempting to retrieve color information for these glyphs can cause an error, preventing the glyph from being rendered. This PR addresses the issue by setting the `is_emoji` variable to `false` for non-color glyphs within color fonts. Release Notes: - N/A --- .../gpui/src/platform/windows/direct_write.rs | 42 ++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/crates/gpui/src/platform/windows/direct_write.rs b/crates/gpui/src/platform/windows/direct_write.rs index 6253881f5aca8b..fb53a833d64e66 100644 --- a/crates/gpui/src/platform/windows/direct_write.rs +++ b/crates/gpui/src/platform/windows/direct_write.rs @@ -1063,7 +1063,7 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl { // This `cast()` action here should never fail since we are running on Win10+, and // `IDWriteFontFace3` requires Win10 let font_face = &font_face.cast::().unwrap(); - let Some((font_identifier, font_struct, is_emoji)) = + let Some((font_identifier, font_struct, color_font)) = get_font_identifier_and_font_struct(font_face, &self.locale) else { return Ok(()); @@ -1084,6 +1084,8 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl { context .index_converter .advance_to_utf16_ix(context.utf16_index); + let is_emoji = color_font + && is_color_glyph(font_face, id, &context.text_system.components.factory); glyphs.push(ShapedGlyph { id, position: point(px(context.width), px(0.0)), @@ -1446,6 +1448,44 @@ fn get_render_target_property( } } +// One would think that with newer DirectWrite method: IDWriteFontFace4::GetGlyphImageFormats +// but that doesn't seem to work for some glyphs, say ❤ +fn is_color_glyph( + font_face: &IDWriteFontFace3, + glyph_id: GlyphId, + factory: &IDWriteFactory5, +) -> bool { + let 
glyph_run = DWRITE_GLYPH_RUN { + fontFace: unsafe { std::mem::transmute_copy(font_face) }, + fontEmSize: 14.0, + glyphCount: 1, + glyphIndices: &(glyph_id.0 as u16), + glyphAdvances: &0.0, + glyphOffsets: &DWRITE_GLYPH_OFFSET { + advanceOffset: 0.0, + ascenderOffset: 0.0, + }, + isSideways: BOOL(0), + bidiLevel: 0, + }; + unsafe { + factory.TranslateColorGlyphRun( + D2D_POINT_2F::default(), + &glyph_run as _, + None, + DWRITE_GLYPH_IMAGE_FORMATS_COLR + | DWRITE_GLYPH_IMAGE_FORMATS_SVG + | DWRITE_GLYPH_IMAGE_FORMATS_PNG + | DWRITE_GLYPH_IMAGE_FORMATS_JPEG + | DWRITE_GLYPH_IMAGE_FORMATS_PREMULTIPLIED_B8G8R8A8, + DWRITE_MEASURING_MODE_NATURAL, + None, + 0, + ) + } + .is_ok() +} + const DEFAULT_LOCALE_NAME: PCWSTR = windows::core::w!("en-US"); const BRUSH_COLOR: D2D1_COLOR_F = D2D1_COLOR_F { r: 1.0, From 8074fba76b4352077fed94364fcfb9d095f177a9 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Thu, 19 Sep 2024 12:31:40 -0400 Subject: [PATCH 222/762] Update List to support UI Density (#18079) Tracking issue: #18078 Improve UI Density support for List. UI density is an unstable feature. You can read more about it in the above issue! 
| Before Normal - Before Dense - After Normal - After Dense | |--------------------------------------------------------| | ![Group 8](https://github.com/user-attachments/assets/bb896fcf-e4a6-4776-9308-1405906d2dbe) | | | | | Before Normal - Before Dense - After Normal - After Dense | |--------------------------------------------------------| | ![Group 9](https://github.com/user-attachments/assets/00815a1b-071b-4d02-96bc-36bf37b5ae8b) | Release Notes: - N/A --- crates/ui/src/components/list/list.rs | 12 ++-- crates/ui/src/components/list/list_header.rs | 15 +++-- crates/ui/src/components/list/list_item.rs | 8 +-- .../ui/src/components/list/list_separator.rs | 2 +- .../ui/src/components/list/list_sub_header.rs | 57 +++++++++++-------- 5 files changed, 55 insertions(+), 39 deletions(-) diff --git a/crates/ui/src/components/list/list.rs b/crates/ui/src/components/list/list.rs index a09abf92e45cd4..4bf157ef4067d8 100644 --- a/crates/ui/src/components/list/list.rs +++ b/crates/ui/src/components/list/list.rs @@ -52,13 +52,15 @@ impl ParentElement for List { } impl RenderOnce for List { - fn render(self, _cx: &mut WindowContext) -> impl IntoElement { - v_flex().w_full().py_1().children(self.header).map(|this| { - match (self.children.is_empty(), self.toggle) { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + v_flex() + .w_full() + .py(Spacing::Small.rems(cx)) + .children(self.header) + .map(|this| match (self.children.is_empty(), self.toggle) { (false, _) => this.children(self.children), (true, Some(false)) => this, (true, _) => this.child(Label::new(self.empty_message.clone()).color(Color::Muted)), - } - }) + }) } } diff --git a/crates/ui/src/components/list/list_header.rs b/crates/ui/src/components/list/list_header.rs index 4377efbc46772e..3b15f8cd3dd090 100644 --- a/crates/ui/src/components/list/list_header.rs +++ b/crates/ui/src/components/list/list_header.rs @@ -2,6 +2,8 @@ use std::sync::Arc; use crate::{h_flex, prelude::*, Disclosure, Label}; use 
gpui::{AnyElement, ClickEvent}; +use settings::Settings; +use theme::ThemeSettings; #[derive(IntoElement)] pub struct ListHeader { @@ -78,6 +80,8 @@ impl Selectable for ListHeader { impl RenderOnce for ListHeader { fn render(self, cx: &mut WindowContext) -> impl IntoElement { + let ui_density = ThemeSettings::get_global(cx).ui_density; + h_flex() .id(self.label.clone()) .w_full() @@ -85,7 +89,10 @@ impl RenderOnce for ListHeader { .group("list_header") .child( div() - .h_7() + .map(|this| match ui_density { + theme::UiDensity::Comfortable => this.h_5(), + _ => this.h_7(), + }) .when(self.inset, |this| this.px_2()) .when(self.selected, |this| { this.bg(cx.theme().colors().ghost_element_selected) @@ -95,10 +102,10 @@ impl RenderOnce for ListHeader { .items_center() .justify_between() .w_full() - .gap_1() + .gap(Spacing::Small.rems(cx)) .child( h_flex() - .gap_1() + .gap(Spacing::Small.rems(cx)) .children(self.toggle.map(|is_open| { Disclosure::new("toggle", is_open).on_toggle(self.on_toggle.clone()) })) @@ -106,7 +113,7 @@ impl RenderOnce for ListHeader { div() .id("label_container") .flex() - .gap_1() + .gap(Spacing::Small.rems(cx)) .items_center() .children(self.start_slot) .child(Label::new(self.label.clone()).color(Color::Muted)) diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index 6b38b7f963fee3..e1c90894fdd3d2 100644 --- a/crates/ui/src/components/list/list_item.rs +++ b/crates/ui/src/components/list/list_item.rs @@ -162,7 +162,7 @@ impl RenderOnce for ListItem { // When an item is inset draw the indent spacing outside of the item .when(self.inset, |this| { this.ml(self.indent_level as f32 * self.indent_step_size) - .px_1() + .px(Spacing::Small.rems(cx)) }) .when(!self.inset && !self.disabled, |this| { this @@ -185,7 +185,7 @@ impl RenderOnce for ListItem { .w_full() .relative() .gap_1() - .px_1p5() + .px(Spacing::Medium.rems(cx)) .map(|this| match self.spacing { ListItemSpacing::Dense => this, 
ListItemSpacing::Sparse => this.py_1(), @@ -238,7 +238,7 @@ impl RenderOnce for ListItem { .flex_grow() .flex_shrink_0() .flex_basis(relative(0.25)) - .gap_1() + .gap(Spacing::Small.rems(cx)) .overflow_hidden() .children(self.start_slot) .children(self.children), @@ -260,7 +260,7 @@ impl RenderOnce for ListItem { h_flex() .h_full() .absolute() - .right_1p5() + .right(Spacing::Medium.rems(cx)) .top_0() .visible_on_hover("list_item") .child(end_hover_slot), diff --git a/crates/ui/src/components/list/list_separator.rs b/crates/ui/src/components/list/list_separator.rs index b53dc7a0433d6b..0d5fdf8d494fbe 100644 --- a/crates/ui/src/components/list/list_separator.rs +++ b/crates/ui/src/components/list/list_separator.rs @@ -8,7 +8,7 @@ impl RenderOnce for ListSeparator { div() .h_px() .w_full() - .my_1p5() + .my(Spacing::Medium.rems(cx)) .bg(cx.theme().colors().border_variant) } } diff --git a/crates/ui/src/components/list/list_sub_header.rs b/crates/ui/src/components/list/list_sub_header.rs index 2aa9387129beb2..0ed072ebbf2c90 100644 --- a/crates/ui/src/components/list/list_sub_header.rs +++ b/crates/ui/src/components/list/list_sub_header.rs @@ -39,30 +39,37 @@ impl Selectable for ListSubHeader { impl RenderOnce for ListSubHeader { fn render(self, cx: &mut WindowContext) -> impl IntoElement { - h_flex().flex_1().w_full().relative().pb_1().px_0p5().child( - div() - .h_6() - .when(self.inset, |this| this.px_2()) - .when(self.selected, |this| { - this.bg(cx.theme().colors().ghost_element_selected) - }) - .flex() - .flex_1() - .w_full() - .gap_1() - .items_center() - .justify_between() - .child( - div() - .flex() - .gap_1() - .items_center() - .children( - self.start_slot - .map(|i| Icon::new(i).color(Color::Muted).size(IconSize::Small)), - ) - .child(Label::new(self.label.clone()).color(Color::Muted)), - ), - ) + h_flex() + .flex_1() + .w_full() + .relative() + .pb(Spacing::Small.rems(cx)) + .px(Spacing::XSmall.rems(cx)) + .child( + div() + .h_6() + .when(self.inset, |this| 
this.px_2()) + .when(self.selected, |this| { + this.bg(cx.theme().colors().ghost_element_selected) + }) + .flex() + .flex_1() + .w_full() + .gap_1() + .items_center() + .justify_between() + .child( + div() + .flex() + .gap_1() + .items_center() + .children( + self.start_slot.map(|i| { + Icon::new(i).color(Color::Muted).size(IconSize::Small) + }), + ) + .child(Label::new(self.label.clone()).color(Color::Muted)), + ), + ) } } From 1fc391f696a828780b6a651df0b797be91aee91e Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 19 Sep 2024 13:14:15 -0400 Subject: [PATCH 223/762] Make `Buffer::apply_ops` infallible (#18089) This PR makes the `Buffer::apply_ops` method infallible for `text::Buffer` and `language::Buffer`. We discovered that `text::Buffer::apply_ops` was only fallible due to `apply_undo`, which didn't actually need to be fallible. Release Notes: - N/A --- crates/assistant/src/context.rs | 8 ++-- crates/assistant/src/context/context_tests.rs | 8 +--- crates/assistant/src/context_store.rs | 6 +-- crates/channel/src/channel_buffer.rs | 4 +- crates/channel/src/channel_store.rs | 2 +- crates/collab/src/db/queries/buffers.rs | 4 +- crates/collab/src/db/tests/buffer_tests.rs | 18 ++++---- crates/language/src/buffer.rs | 5 +- crates/language/src/buffer_tests.rs | 46 ++++++++----------- crates/multi_buffer/src/multi_buffer.rs | 12 ++--- crates/project/src/buffer_store.rs | 9 ++-- crates/text/src/tests.rs | 32 ++++++------- crates/text/src/text.rs | 39 +++++++--------- 13 files changed, 85 insertions(+), 108 deletions(-) diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index d72b04e3cddb17..830c0980491f7c 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -683,7 +683,7 @@ impl Context { buffer.set_text(saved_context.text.as_str(), cx) }); let operations = saved_context.into_ops(&this.buffer, cx); - this.apply_ops(operations, cx).unwrap(); + this.apply_ops(operations, cx); this } @@ -756,7 +756,7 @@ 
impl Context { &mut self, ops: impl IntoIterator, cx: &mut ModelContext, - ) -> Result<()> { + ) { let mut buffer_ops = Vec::new(); for op in ops { match op { @@ -765,10 +765,8 @@ impl Context { } } self.buffer - .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?; + .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx)); self.flush_ops(cx); - - Ok(()) } fn flush_ops(&mut self, cx: &mut ModelContext) { diff --git a/crates/assistant/src/context/context_tests.rs b/crates/assistant/src/context/context_tests.rs index 842ac050786344..2d6a2894c9521a 100644 --- a/crates/assistant/src/context/context_tests.rs +++ b/crates/assistant/src/context/context_tests.rs @@ -1166,9 +1166,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std ); network.lock().broadcast(replica_id, ops_to_send); - context - .update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)) - .unwrap(); + context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)); } else if rng.gen_bool(0.1) && replica_id != 0 { log::info!("Context {}: disconnecting", context_index); network.lock().disconnect_peer(replica_id); @@ -1180,9 +1178,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std .map(ContextOperation::from_proto) .collect::>>() .unwrap(); - context - .update(cx, |context, cx| context.apply_ops(ops, cx)) - .unwrap(); + context.update(cx, |context, cx| context.apply_ops(ops, cx)); } } } diff --git a/crates/assistant/src/context_store.rs b/crates/assistant/src/context_store.rs index 867d9067914854..f57a2fbca613c3 100644 --- a/crates/assistant/src/context_store.rs +++ b/crates/assistant/src/context_store.rs @@ -223,7 +223,7 @@ impl ContextStore { if let Some(context) = this.loaded_context_for_id(&context_id, cx) { let operation_proto = envelope.payload.operation.context("invalid operation")?; let operation = ContextOperation::from_proto(operation_proto)?; - context.update(cx, |context, cx| context.apply_ops([operation], 
cx))?; + context.update(cx, |context, cx| context.apply_ops([operation], cx)); } Ok(()) })? @@ -394,7 +394,7 @@ impl ContextStore { .collect::>>() }) .await?; - context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??; + context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; this.update(&mut cx, |this, cx| { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { existing_context @@ -531,7 +531,7 @@ impl ContextStore { .collect::>>() }) .await?; - context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??; + context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; this.update(&mut cx, |this, cx| { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { existing_context diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index df3e66483f873f..755e7400e1b66e 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -66,7 +66,7 @@ impl ChannelBuffer { let capability = channel_store.read(cx).channel_capability(channel.id); language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text) })?; - buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??; + buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?; let subscription = client.subscribe_to_entity(channel.id.0)?; @@ -151,7 +151,7 @@ impl ChannelBuffer { cx.notify(); this.buffer .update(cx, |buffer, cx| buffer.apply_ops(ops, cx)) - })??; + })?; Ok(()) } diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index 9bd5fd564f29dc..fc5b12cfae1c39 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -1007,7 +1007,7 @@ impl ChannelStore { .into_iter() .map(language::proto::deserialize_operation) .collect::>>()?; - buffer.apply_ops(incoming_operations, cx)?; + buffer.apply_ops(incoming_operations, cx); 
anyhow::Ok(outgoing_operations) }) .log_err(); diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 7b19dee315476d..06ad2b45946511 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -689,9 +689,7 @@ impl Database { } let mut text_buffer = text::Buffer::new(0, text::BufferId::new(1).unwrap(), base_text); - text_buffer - .apply_ops(operations.into_iter().filter_map(operation_from_wire)) - .unwrap(); + text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire)); let base_text = text_buffer.text(); let epoch = buffer.epoch + 1; diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index 55a8f216c49406..adc571580a0724 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -96,16 +96,14 @@ async fn test_channel_buffers(db: &Arc) { text::BufferId::new(1).unwrap(), buffer_response_b.base_text, ); - buffer_b - .apply_ops(buffer_response_b.operations.into_iter().map(|operation| { - let operation = proto::deserialize_operation(operation).unwrap(); - if let language::Operation::Buffer(operation) = operation { - operation - } else { - unreachable!() - } - })) - .unwrap(); + buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| { + let operation = proto::deserialize_operation(operation).unwrap(); + if let language::Operation::Buffer(operation) = operation { + operation + } else { + unreachable!() + } + })); assert_eq!(buffer_b.text(), "hello, cruel world"); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 43fe1565acb796..08fc1ccdb45d5b 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1972,7 +1972,7 @@ impl Buffer { &mut self, ops: I, cx: &mut ModelContext, - ) -> Result<()> { + ) { self.pending_autoindent.take(); let was_dirty = self.is_dirty(); let old_version = 
self.version.clone(); @@ -1991,14 +1991,13 @@ impl Buffer { } }) .collect::>(); - self.text.apply_ops(buffer_ops)?; + self.text.apply_ops(buffer_ops); self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops(cx); self.did_edit(&old_version, was_dirty, cx); // Notify independently of whether the buffer was edited as the operations could include a // selection update. cx.notify(); - Ok(()) } fn flush_deferred_ops(&mut self, cx: &mut ModelContext) { diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 50dea8d2562b00..23faa33316da79 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -308,7 +308,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) { // Incorporating a set of remote ops emits a single edited event, // followed by a dirty changed event. buffer2.update(cx, |buffer, cx| { - buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap(); + buffer.apply_ops(buffer1_ops.lock().drain(..), cx); }); assert_eq!( mem::take(&mut *buffer_1_events.lock()), @@ -332,7 +332,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) { // Incorporating the remote ops again emits a single edited event, // followed by a dirty changed event. 
buffer2.update(cx, |buffer, cx| { - buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap(); + buffer.apply_ops(buffer1_ops.lock().drain(..), cx); }); assert_eq!( mem::take(&mut *buffer_1_events.lock()), @@ -2274,13 +2274,11 @@ fn test_serialization(cx: &mut gpui::AppContext) { .block(buffer1.read(cx).serialize_ops(None, cx)); let buffer2 = cx.new_model(|cx| { let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); - buffer - .apply_ops( - ops.into_iter() - .map(|op| proto::deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| proto::deserialize_operation(op).unwrap()), + cx, + ); buffer }); assert_eq!(buffer2.read(cx).text(), "abcDF"); @@ -2401,13 +2399,11 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { .block(base_buffer.read(cx).serialize_ops(None, cx)); let mut buffer = Buffer::from_proto(i as ReplicaId, Capability::ReadWrite, state, None).unwrap(); - buffer - .apply_ops( - ops.into_iter() - .map(|op| proto::deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| proto::deserialize_operation(op).unwrap()), + cx, + ); buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); let network = network.clone(); cx.subscribe(&cx.handle(), move |buffer, _, event, _| { @@ -2523,14 +2519,12 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { None, ) .unwrap(); - new_buffer - .apply_ops( - old_buffer_ops - .into_iter() - .map(|op| deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + new_buffer.apply_ops( + old_buffer_ops + .into_iter() + .map(|op| deserialize_operation(op).unwrap()), + cx, + ); log::info!( "New replica {} text: {:?}", new_buffer.replica_id(), @@ -2570,7 +2564,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { ops ); new_buffer.update(cx, |new_buffer, cx| { - new_buffer.apply_ops(ops, cx).unwrap(); + new_buffer.apply_ops(ops, 
cx); }); } } @@ -2598,7 +2592,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { ops.len(), ops ); - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap()); + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } } _ => {} diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 9dee092dea9f29..29bd9a80682a10 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5019,13 +5019,11 @@ mod tests { .background_executor() .block(host_buffer.read(cx).serialize_ops(None, cx)); let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); - buffer - .apply_ops( - ops.into_iter() - .map(|op| language::proto::deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| language::proto::deserialize_operation(op).unwrap()), + cx, + ); buffer }); let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(guest_buffer.clone(), cx)); diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index ead32359970e25..0045aba2e89ecf 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -644,7 +644,7 @@ impl BufferStore { } hash_map::Entry::Occupied(mut entry) => { if let OpenBuffer::Operations(operations) = entry.get_mut() { - buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?; + buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx)); } else if entry.get().upgrade().is_some() { if is_remote { return Ok(()); @@ -1051,12 +1051,12 @@ impl BufferStore { match this.opened_buffers.entry(buffer_id) { hash_map::Entry::Occupied(mut e) => match e.get_mut() { OpenBuffer::Strong(buffer) => { - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?; + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops), 
OpenBuffer::Weak(buffer) => { if let Some(buffer) = buffer.upgrade() { - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?; + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } } }, @@ -1217,7 +1217,8 @@ impl BufferStore { .into_iter() .map(language::proto::deserialize_operation) .collect::>>()?; - buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)) + buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)); + anyhow::Ok(()) }); if let Err(error) = result { diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 6f748fb5880b31..8c5d7014eebda0 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -515,25 +515,25 @@ fn test_undo_redo() { let entries = buffer.history.undo_stack.clone(); assert_eq!(entries.len(), 3); - buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()); assert_eq!(buffer.text(), "1cdef234"); - buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()); assert_eq!(buffer.text(), "1abcdx234"); - buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()); assert_eq!(buffer.text(), "1abx234"); - buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); + 
buffer.undo_or_redo(entries[0].transaction.clone()); assert_eq!(buffer.text(), "1yzef234"); - buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()); assert_eq!(buffer.text(), "1234"); } @@ -692,12 +692,12 @@ fn test_concurrent_edits() { let buf3_op = buffer3.edit([(5..6, "56")]); assert_eq!(buffer3.text(), "abcde56"); - buffer1.apply_op(buf2_op.clone()).unwrap(); - buffer1.apply_op(buf3_op.clone()).unwrap(); - buffer2.apply_op(buf1_op.clone()).unwrap(); - buffer2.apply_op(buf3_op).unwrap(); - buffer3.apply_op(buf1_op).unwrap(); - buffer3.apply_op(buf2_op).unwrap(); + buffer1.apply_op(buf2_op.clone()); + buffer1.apply_op(buf3_op.clone()); + buffer2.apply_op(buf1_op.clone()); + buffer2.apply_op(buf3_op); + buffer3.apply_op(buf1_op); + buffer3.apply_op(buf2_op); assert_eq!(buffer1.text(), "a12c34e56"); assert_eq!(buffer2.text(), "a12c34e56"); @@ -756,7 +756,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { replica_id, ops.len() ); - buffer.apply_ops(ops).unwrap(); + buffer.apply_ops(ops); } } _ => {} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 9630ec5b80334c..8d2cd97aacaaee 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -38,7 +38,6 @@ pub use subscription::*; pub use sum_tree::Bias; use sum_tree::{FilterCursor, SumTree, TreeMap}; use undo_map::UndoMap; -use util::ResultExt; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; @@ -927,23 +926,22 @@ impl Buffer { self.snapshot.line_ending = line_ending; } - pub fn apply_ops>(&mut self, ops: I) -> Result<()> { + pub fn apply_ops>(&mut self, ops: I) { let mut deferred_ops = Vec::new(); for op in ops { self.history.push(op.clone()); if self.can_apply_op(&op) { - self.apply_op(op)?; + self.apply_op(op); } else { self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); } } self.deferred_ops.insert(deferred_ops); - self.flush_deferred_ops()?; - Ok(()) + self.flush_deferred_ops(); } 
- fn apply_op(&mut self, op: Operation) -> Result<()> { + fn apply_op(&mut self, op: Operation) { match op { Operation::Edit(edit) => { if !self.version.observed(edit.timestamp) { @@ -960,7 +958,7 @@ impl Buffer { } Operation::Undo(undo) => { if !self.version.observed(undo.timestamp) { - self.apply_undo(&undo)?; + self.apply_undo(&undo); self.snapshot.version.observe(undo.timestamp); self.lamport_clock.observe(undo.timestamp); } @@ -974,7 +972,6 @@ impl Buffer { true } }); - Ok(()) } fn apply_remote_edit( @@ -1217,7 +1214,7 @@ impl Buffer { fragment_ids } - fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> { + fn apply_undo(&mut self, undo: &UndoOperation) { self.snapshot.undo_map.insert(undo); let mut edits = Patch::default(); @@ -1268,22 +1265,20 @@ impl Buffer { self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; self.subscriptions.publish_mut(&edits); - Ok(()) } - fn flush_deferred_ops(&mut self) -> Result<()> { + fn flush_deferred_ops(&mut self) { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); for op in self.deferred_ops.drain().iter().cloned() { if self.can_apply_op(&op) { - self.apply_op(op)?; + self.apply_op(op); } else { self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); } } self.deferred_ops.insert(deferred_ops); - Ok(()) } fn can_apply_op(&self, op: &Operation) -> bool { @@ -1352,7 +1347,7 @@ impl Buffer { if let Some(entry) = self.history.pop_undo() { let transaction = entry.transaction.clone(); let transaction_id = transaction.id; - let op = self.undo_or_redo(transaction).unwrap(); + let op = self.undo_or_redo(transaction); Some((transaction_id, op)) } else { None @@ -1365,7 +1360,7 @@ impl Buffer { .remove_from_undo(transaction_id)? 
.transaction .clone(); - self.undo_or_redo(transaction).log_err() + Some(self.undo_or_redo(transaction)) } pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec { @@ -1378,7 +1373,7 @@ impl Buffer { transactions .into_iter() - .map(|transaction| self.undo_or_redo(transaction).unwrap()) + .map(|transaction| self.undo_or_redo(transaction)) .collect() } @@ -1394,7 +1389,7 @@ impl Buffer { if let Some(entry) = self.history.pop_redo() { let transaction = entry.transaction.clone(); let transaction_id = transaction.id; - let op = self.undo_or_redo(transaction).unwrap(); + let op = self.undo_or_redo(transaction); Some((transaction_id, op)) } else { None @@ -1411,11 +1406,11 @@ impl Buffer { transactions .into_iter() - .map(|transaction| self.undo_or_redo(transaction).unwrap()) + .map(|transaction| self.undo_or_redo(transaction)) .collect() } - fn undo_or_redo(&mut self, transaction: Transaction) -> Result { + fn undo_or_redo(&mut self, transaction: Transaction) -> Operation { let mut counts = HashMap::default(); for edit_id in transaction.edit_ids { counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); @@ -1426,11 +1421,11 @@ impl Buffer { version: self.version(), counts, }; - self.apply_undo(&undo)?; + self.apply_undo(&undo); self.snapshot.version.observe(undo.timestamp); let operation = Operation::Undo(undo); self.history.push(operation.clone()); - Ok(operation) + operation } pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) { @@ -1762,7 +1757,7 @@ impl Buffer { self.replica_id, transaction ); - ops.push(self.undo_or_redo(transaction).unwrap()); + ops.push(self.undo_or_redo(transaction)); } } ops From 27c1106fadef97dc56d17d9359fd7514b71c8643 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Thu, 19 Sep 2024 13:26:14 -0400 Subject: [PATCH 224/762] Fix bug where copying from assistant panel appends extra newline to clipboard (#18090) Closes https://github.com/zed-industries/zed/issues/17661 Release Notes: - Fixed a bug where copying from the assistant panel appended an additional newline to the end of the clipboard contents. --- crates/assistant/src/assistant_panel.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index da176ebeee61e0..364c6f9663120c 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -3533,7 +3533,9 @@ impl ContextEditor { for chunk in context.buffer().read(cx).text_for_range(range) { text.push_str(chunk); } - text.push('\n'); + if message.offset_range.end < selection.range().end { + text.push('\n'); + } } } } From 00b1c81c9f8f209667140036da4c9ac578031546 Mon Sep 17 00:00:00 2001 From: David Soria Parra <167242713+dsp-ant@users.noreply.github.com> Date: Thu, 19 Sep 2024 20:51:48 +0100 Subject: [PATCH 225/762] context_servers: Remove context_type from ResourceContent (#18097) This is removed in the protocol Release Notes: - N/A --- crates/context_servers/src/types.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/context_servers/src/types.rs b/crates/context_servers/src/types.rs index c0e9a79f1589c5..cd95ecd7adb368 100644 --- a/crates/context_servers/src/types.rs +++ b/crates/context_servers/src/types.rs @@ -239,7 +239,6 @@ pub struct Resource { pub struct ResourceContent { pub uri: Url, pub mime_type: Option, - pub content_type: String, pub text: Option, pub data: Option, } From fbbf0393cbe9b2094bbdd496a5d5d15419eeaeb3 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 19 Sep 2024 14:04:46 -0600 Subject: [PATCH 226/762] ssh-remoting: Fix go to definition out of worktree (#18094) Release Notes: - ssh-remoting: Fixed go to definition outside of worktree 
--------- Co-authored-by: Mikayla --- crates/client/src/client.rs | 4 + crates/project/src/lsp_store.rs | 21 ++- crates/project/src/project.rs | 181 ++++++++----------- crates/project/src/worktree_store.rs | 173 ++++++++++++++---- crates/remote/src/ssh_session.rs | 7 +- crates/remote_server/src/headless_project.rs | 17 +- crates/rpc/src/proto_client.rs | 6 + 7 files changed, 251 insertions(+), 158 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 09286300d9d38f..a8387f7c5ac9ba 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1621,6 +1621,10 @@ impl ProtoClient for Client { fn message_handler_set(&self) -> &parking_lot::Mutex { &self.handler_set } + + fn goes_via_collab(&self) -> bool { + true + } } #[derive(Serialize, Deserialize)] diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 58d9ba8926737d..5c32c9030db3ff 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -534,6 +534,9 @@ impl LspStore { } WorktreeStoreEvent::WorktreeRemoved(_, id) => self.remove_worktree(*id, cx), WorktreeStoreEvent::WorktreeOrderChanged => {} + WorktreeStoreEvent::WorktreeUpdateSent(worktree) => { + worktree.update(cx, |worktree, _cx| self.send_diagnostic_summaries(worktree)); + } } } @@ -764,24 +767,22 @@ impl LspStore { self.active_entry = active_entry; } - pub(crate) fn send_diagnostic_summaries( - &self, - worktree: &mut Worktree, - ) -> Result<(), anyhow::Error> { + pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) { if let Some(client) = self.downstream_client.clone() { if let Some(summaries) = self.diagnostic_summaries.get(&worktree.id()) { for (path, summaries) in summaries { for (&server_id, summary) in summaries { - client.send(proto::UpdateDiagnosticSummary { - project_id: self.project_id, - worktree_id: worktree.id().to_proto(), - summary: Some(summary.to_proto(server_id, path)), - })?; + client + 
.send(proto::UpdateDiagnosticSummary { + project_id: self.project_id, + worktree_id: worktree.id().to_proto(), + summary: Some(summary.to_proto(server_id, path)), + }) + .log_err(); } } } } - Ok(()) } pub fn request_lsp( diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index f4816cf0cde66f..fcf10d11c2cca7 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -31,7 +31,7 @@ pub use environment::ProjectEnvironment; use futures::{ channel::mpsc::{self, UnboundedReceiver}, future::try_join_all, - AsyncWriteExt, FutureExt, StreamExt, + AsyncWriteExt, StreamExt, }; use git::{blame::Blame, repository::GitRepository}; @@ -152,7 +152,7 @@ pub struct Project { _subscriptions: Vec, buffers_needing_diff: HashSet>, git_diff_debouncer: DebouncedDelay, - remotely_created_buffers: Arc>, + remotely_created_models: Arc>, terminals: Terminals, node: Option>, tasks: Model, @@ -169,26 +169,28 @@ pub struct Project { } #[derive(Default)] -struct RemotelyCreatedBuffers { +struct RemotelyCreatedModels { + worktrees: Vec>, buffers: Vec>, retain_count: usize, } -struct RemotelyCreatedBufferGuard { - remote_buffers: std::sync::Weak>, +struct RemotelyCreatedModelGuard { + remote_models: std::sync::Weak>, } -impl Drop for RemotelyCreatedBufferGuard { +impl Drop for RemotelyCreatedModelGuard { fn drop(&mut self) { - if let Some(remote_buffers) = self.remote_buffers.upgrade() { - let mut remote_buffers = remote_buffers.lock(); + if let Some(remote_models) = self.remote_models.upgrade() { + let mut remote_models = remote_models.lock(); assert!( - remote_buffers.retain_count > 0, - "RemotelyCreatedBufferGuard dropped too many times" + remote_models.retain_count > 0, + "RemotelyCreatedModelGuard dropped too many times" ); - remote_buffers.retain_count -= 1; - if remote_buffers.retain_count == 0 { - remote_buffers.buffers.clear(); + remote_models.retain_count -= 1; + if remote_models.retain_count == 0 { + remote_models.buffers.clear(); + 
remote_models.worktrees.clear(); } } } @@ -620,7 +622,7 @@ impl Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let worktree_store = cx.new_model(|_| WorktreeStore::new(false, fs.clone())); + let worktree_store = cx.new_model(|_| WorktreeStore::new(None, false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -687,7 +689,7 @@ impl Project { dev_server_project_id: None, search_history: Self::new_search_history(), environment, - remotely_created_buffers: Default::default(), + remotely_created_models: Default::default(), last_formatting_failure: None, buffers_being_formatted: Default::default(), search_included_history: Self::new_search_history(), @@ -714,11 +716,8 @@ impl Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let worktree_store = cx.new_model(|_| { - let mut worktree_store = WorktreeStore::new(false, fs.clone()); - worktree_store.set_upstream_client(ssh.clone().into()); - worktree_store - }); + let worktree_store = + cx.new_model(|_| WorktreeStore::new(Some(ssh.clone().into()), false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -773,7 +772,7 @@ impl Project { dev_server_project_id: None, search_history: Self::new_search_history(), environment, - remotely_created_buffers: Default::default(), + remotely_created_models: Default::default(), last_formatting_failure: None, buffers_being_formatted: Default::default(), search_included_history: Self::new_search_history(), @@ -787,8 +786,9 @@ impl Project { ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.worktree_store); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.lsp_store); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.settings_observer); - client.add_model_message_handler(Self::handle_update_worktree); client.add_model_message_handler(Self::handle_create_buffer_for_peer); + 
client.add_model_message_handler(Self::handle_update_worktree); + client.add_model_message_handler(Self::handle_update_project); client.add_model_request_handler(BufferStore::handle_update_buffer); BufferStore::init(&client); LspStore::init(&client); @@ -867,8 +867,7 @@ impl Project { let role = response.payload.role(); let worktree_store = cx.new_model(|_| { - let mut store = WorktreeStore::new(true, fs.clone()); - store.set_upstream_client(client.clone().into()); + let mut store = WorktreeStore::new(Some(client.clone().into()), true, fs.clone()); if let Some(dev_server_project_id) = response.payload.dev_server_project_id { store.set_dev_server_project_id(DevServerProjectId(dev_server_project_id)); } @@ -955,7 +954,7 @@ impl Project { search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), environment: ProjectEnvironment::new(&worktree_store, None, cx), - remotely_created_buffers: Arc::new(Mutex::new(RemotelyCreatedBuffers::default())), + remotely_created_models: Arc::new(Mutex::new(RemotelyCreatedModels::default())), last_formatting_failure: None, buffers_being_formatted: Default::default(), }; @@ -1259,43 +1258,6 @@ impl Project { } } - fn metadata_changed(&mut self, cx: &mut ModelContext) { - cx.notify(); - - let ProjectClientState::Shared { remote_id } = self.client_state else { - return; - }; - let project_id = remote_id; - - let update_project = self.client.request(proto::UpdateProject { - project_id, - worktrees: self.worktree_metadata_protos(cx), - }); - cx.spawn(|this, mut cx| async move { - update_project.await?; - this.update(&mut cx, |this, cx| { - let client = this.client.clone(); - let worktrees = this.worktree_store.read(cx).worktrees().collect::>(); - - for worktree in worktrees { - worktree.update(cx, |worktree, cx| { - let client = client.clone(); - worktree.observe_updates(project_id, cx, { - move |update| client.request(update).map(|result| result.is_ok()) - }); - - this.lsp_store.update(cx, 
|lsp_store, _| { - lsp_store.send_diagnostic_summaries(worktree) - }) - })?; - } - - anyhow::Ok(()) - }) - }) - .detach_and_log_err(cx); - } - pub fn task_inventory(&self) -> &Model { &self.tasks } @@ -1513,7 +1475,7 @@ impl Project { buffer_store.shared(project_id, self.client.clone().into(), cx) }); self.worktree_store.update(cx, |worktree_store, cx| { - worktree_store.set_shared(true, cx); + worktree_store.shared(project_id, self.client.clone().into(), cx); }); self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.shared(project_id, self.client.clone().into(), cx) @@ -1526,7 +1488,6 @@ impl Project { remote_id: project_id, }; - self.metadata_changed(cx); cx.emit(Event::RemoteIdChanged(Some(project_id))); cx.notify(); Ok(()) @@ -1540,7 +1501,11 @@ impl Project { self.buffer_store .update(cx, |buffer_store, _| buffer_store.forget_shared_buffers()); self.set_collaborators_from_proto(message.collaborators, cx)?; - self.metadata_changed(cx); + + self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.send_project_updates(cx); + }); + cx.notify(); cx.emit(Event::Reshared); Ok(()) } @@ -1576,7 +1541,6 @@ impl Project { pub fn unshare(&mut self, cx: &mut ModelContext) -> Result<()> { self.unshare_internal(cx)?; - self.metadata_changed(cx); cx.notify(); Ok(()) } @@ -1598,7 +1562,7 @@ impl Project { self.collaborators.clear(); self.client_subscriptions.clear(); self.worktree_store.update(cx, |store, cx| { - store.set_shared(false, cx); + store.unshared(cx); }); self.buffer_store.update(cx, |buffer_store, cx| { buffer_store.forget_shared_buffers(); @@ -1867,9 +1831,9 @@ impl Project { cx: &mut ModelContext, ) -> Result<()> { { - let mut remotely_created_buffers = self.remotely_created_buffers.lock(); - if remotely_created_buffers.retain_count > 0 { - remotely_created_buffers.buffers.push(buffer.clone()) + let mut remotely_created_models = self.remotely_created_models.lock(); + if remotely_created_models.retain_count > 0 { + 
remotely_created_models.buffers.push(buffer.clone()) } } @@ -2110,10 +2074,17 @@ impl Project { cx.emit(Event::WorktreeRemoved(*id)); } WorktreeStoreEvent::WorktreeOrderChanged => cx.emit(Event::WorktreeOrderChanged), + WorktreeStoreEvent::WorktreeUpdateSent(_) => {} } } fn on_worktree_added(&mut self, worktree: &Model, cx: &mut ModelContext) { + { + let mut remotely_created_models = self.remotely_created_models.lock(); + if remotely_created_models.retain_count > 0 { + remotely_created_models.worktrees.push(worktree.clone()) + } + } cx.observe(worktree, |_, _, cx| cx.notify()).detach(); cx.subscribe(worktree, |this, worktree, event, cx| { let is_local = worktree.read(cx).is_local(); @@ -2140,7 +2111,7 @@ impl Project { } }) .detach(); - self.metadata_changed(cx); + cx.notify(); } fn on_worktree_removed(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { @@ -2171,7 +2142,7 @@ impl Project { inventory.remove_worktree_sources(id_to_remove); }); - self.metadata_changed(cx); + cx.notify(); } fn on_buffer_event( @@ -3012,7 +2983,7 @@ impl Project { #[inline(never)] fn definition_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3025,7 +2996,7 @@ impl Project { ) } pub fn definition( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3035,7 +3006,7 @@ impl Project { } fn declaration_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3049,7 +3020,7 @@ impl Project { } pub fn declaration( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3059,7 +3030,7 @@ impl Project { } fn type_definition_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3073,7 +3044,7 @@ impl Project { } pub fn type_definition( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3083,7 +3054,7 @@ impl Project { } pub fn implementation( - &self, + &mut self, buffer: &Model, position: T, cx: 
&mut ModelContext, @@ -3098,7 +3069,7 @@ impl Project { } pub fn references( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3113,7 +3084,7 @@ impl Project { } fn document_highlights_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3127,7 +3098,7 @@ impl Project { } pub fn document_highlights( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3514,7 +3485,7 @@ impl Project { query: Some(query.to_proto()), limit: limit as _, }); - let guard = self.retain_remotely_created_buffers(cx); + let guard = self.retain_remotely_created_models(cx); cx.spawn(move |this, mut cx| async move { let response = request.await?; @@ -3536,7 +3507,7 @@ impl Project { } pub fn request_lsp( - &self, + &mut self, buffer_handle: Model, server: LanguageServerToQuery, request: R, @@ -3546,8 +3517,14 @@ impl Project { ::Result: Send, ::Params: Send, { - self.lsp_store.update(cx, |lsp_store, cx| { + let guard = self.retain_remotely_created_models(cx); + let task = self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.request_lsp(buffer_handle, server, request, cx) + }); + cx.spawn(|_, _| async move { + let result = task.await; + drop(guard); + result }) } @@ -4095,6 +4072,7 @@ impl Project { })? 
} + // Collab sends UpdateWorktree protos as messages async fn handle_update_worktree( this: Model, envelope: TypedEnvelope, @@ -4130,19 +4108,21 @@ impl Project { BufferStore::handle_update_buffer(buffer_store, envelope, cx).await } - fn retain_remotely_created_buffers( + fn retain_remotely_created_models( &mut self, cx: &mut ModelContext, - ) -> RemotelyCreatedBufferGuard { + ) -> RemotelyCreatedModelGuard { { - let mut remotely_created_buffers = self.remotely_created_buffers.lock(); - if remotely_created_buffers.retain_count == 0 { - remotely_created_buffers.buffers = self.buffer_store.read(cx).buffers().collect(); + let mut remotely_create_models = self.remotely_created_models.lock(); + if remotely_create_models.retain_count == 0 { + remotely_create_models.buffers = self.buffer_store.read(cx).buffers().collect(); + remotely_create_models.worktrees = + self.worktree_store.read(cx).worktrees().collect(); } - remotely_created_buffers.retain_count += 1; + remotely_create_models.retain_count += 1; } - RemotelyCreatedBufferGuard { - remote_buffers: Arc::downgrade(&self.remotely_created_buffers), + RemotelyCreatedModelGuard { + remote_models: Arc::downgrade(&self.remotely_created_models), } } @@ -4637,16 +4617,11 @@ impl Project { worktrees: Vec, cx: &mut ModelContext, ) -> Result<()> { - self.metadata_changed(cx); - self.worktree_store.update(cx, |worktree_store, cx| { - worktree_store.set_worktrees_from_proto( - worktrees, - self.replica_id(), - self.remote_id().ok_or_else(|| anyhow!("invalid project"))?, - self.client.clone().into(), - cx, - ) - }) + cx.notify(); + let result = self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.set_worktrees_from_proto(worktrees, self.replica_id(), cx) + }); + result } fn set_collaborators_from_proto( diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 07764d4a05ce71..7fae8b9e1dbdd7 100644 --- a/crates/project/src/worktree_store.rs +++ 
b/crates/project/src/worktree_store.rs @@ -39,8 +39,10 @@ struct MatchingEntry { pub struct WorktreeStore { next_entry_id: Arc, upstream_client: Option, + downstream_client: Option, + remote_id: u64, dev_server_project_id: Option, - is_shared: bool, + retain_worktrees: bool, worktrees: Vec, worktrees_reordered: bool, #[allow(clippy::type_complexity)] @@ -53,6 +55,7 @@ pub enum WorktreeStoreEvent { WorktreeAdded(Model), WorktreeRemoved(EntityId, WorktreeId), WorktreeOrderChanged, + WorktreeUpdateSent(Model), } impl EventEmitter for WorktreeStore {} @@ -66,23 +69,25 @@ impl WorktreeStore { client.add_model_request_handler(Self::handle_expand_project_entry); } - pub fn new(retain_worktrees: bool, fs: Arc) -> Self { + pub fn new( + upstream_client: Option, + retain_worktrees: bool, + fs: Arc, + ) -> Self { Self { next_entry_id: Default::default(), loading_worktrees: Default::default(), - upstream_client: None, dev_server_project_id: None, - is_shared: retain_worktrees, + downstream_client: None, worktrees: Vec::new(), worktrees_reordered: false, + retain_worktrees, + remote_id: 0, + upstream_client, fs, } } - pub fn set_upstream_client(&mut self, client: AnyProtoClient) { - self.upstream_client = Some(client); - } - pub fn set_dev_server_project_id(&mut self, id: DevServerProjectId) { self.dev_server_project_id = Some(id); } @@ -201,6 +206,13 @@ impl WorktreeStore { path: abs_path.clone(), }) .await?; + + if let Some(existing_worktree) = this.read_with(&cx, |this, cx| { + this.worktree_for_id(WorktreeId::from_proto(response.worktree_id), cx) + })? 
{ + return Ok(existing_worktree); + } + let worktree = cx.update(|cx| { Worktree::remote( 0, @@ -302,7 +314,10 @@ impl WorktreeStore { } pub fn add(&mut self, worktree: &Model, cx: &mut ModelContext) { - let push_strong_handle = self.is_shared || worktree.read(cx).is_visible(); + let worktree_id = worktree.read(cx).id(); + debug_assert!(!self.worktrees().any(|w| w.read(cx).id() == worktree_id)); + + let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible(); let handle = if push_strong_handle { WorktreeHandle::Strong(worktree.clone()) } else { @@ -322,13 +337,15 @@ impl WorktreeStore { } cx.emit(WorktreeStoreEvent::WorktreeAdded(worktree.clone())); + self.send_project_updates(cx); let handle_id = worktree.entity_id(); - cx.observe_release(worktree, move |_, worktree, cx| { + cx.observe_release(worktree, move |this, worktree, cx| { cx.emit(WorktreeStoreEvent::WorktreeRemoved( handle_id, worktree.id(), )); + this.send_project_updates(cx); }) .detach(); } @@ -349,6 +366,7 @@ impl WorktreeStore { false } }); + self.send_project_updates(cx); } pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) { @@ -359,8 +377,6 @@ impl WorktreeStore { &mut self, worktrees: Vec, replica_id: ReplicaId, - remote_id: u64, - client: AnyProtoClient, cx: &mut ModelContext, ) -> Result<()> { let mut old_worktrees_by_id = self @@ -372,18 +388,31 @@ impl WorktreeStore { }) .collect::>(); + let client = self + .upstream_client + .clone() + .ok_or_else(|| anyhow!("invalid project"))?; + for worktree in worktrees { if let Some(old_worktree) = old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id)) { - self.worktrees.push(WorktreeHandle::Strong(old_worktree)); + let push_strong_handle = + self.retain_worktrees || old_worktree.read(cx).is_visible(); + let handle = if push_strong_handle { + WorktreeHandle::Strong(old_worktree.clone()) + } else { + WorktreeHandle::Weak(old_worktree.downgrade()) + }; + self.worktrees.push(handle); } else { self.add( - 
&Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx), + &Worktree::remote(self.remote_id, replica_id, worktree, client.clone(), cx), cx, ); } } + self.send_project_updates(cx); Ok(()) } @@ -446,33 +475,109 @@ impl WorktreeStore { } } - pub fn set_shared(&mut self, is_shared: bool, cx: &mut ModelContext) { - self.is_shared = is_shared; + pub fn send_project_updates(&mut self, cx: &mut ModelContext) { + let Some(downstream_client) = self.downstream_client.clone() else { + return; + }; + let project_id = self.remote_id; + + let update = proto::UpdateProject { + project_id, + worktrees: self.worktree_metadata_protos(cx), + }; + + // collab has bad concurrency guarantees, so we send requests in serial. + let update_project = if downstream_client.goes_via_collab() { + Some(downstream_client.request(update)) + } else { + downstream_client.send(update).log_err(); + None + }; + cx.spawn(|this, mut cx| async move { + if let Some(update_project) = update_project { + update_project.await?; + } + + this.update(&mut cx, |this, cx| { + let worktrees = this.worktrees().collect::>(); + + for worktree in worktrees { + worktree.update(cx, |worktree, cx| { + let client = downstream_client.clone(); + worktree.observe_updates(project_id, cx, { + move |update| { + let client = client.clone(); + async move { + if client.goes_via_collab() { + client.request(update).map(|result| result.is_ok()).await + } else { + client.send(update).is_ok() + } + } + } + }); + }); + + cx.emit(WorktreeStoreEvent::WorktreeUpdateSent(worktree.clone())) + } + + anyhow::Ok(()) + }) + }) + .detach_and_log_err(cx); + } + + pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec { + self.worktrees() + .map(|worktree| { + let worktree = worktree.read(cx); + proto::WorktreeMetadata { + id: worktree.id().to_proto(), + root_name: worktree.root_name().into(), + visible: worktree.is_visible(), + abs_path: worktree.abs_path().to_string_lossy().into(), + } + }) + .collect() + } + + pub fn shared( + 
&mut self, + remote_id: u64, + downsteam_client: AnyProtoClient, + cx: &mut ModelContext, + ) { + self.retain_worktrees = true; + self.remote_id = remote_id; + self.downstream_client = Some(downsteam_client); // When shared, retain all worktrees - if is_shared { - for worktree_handle in self.worktrees.iter_mut() { - match worktree_handle { - WorktreeHandle::Strong(_) => {} - WorktreeHandle::Weak(worktree) => { - if let Some(worktree) = worktree.upgrade() { - *worktree_handle = WorktreeHandle::Strong(worktree); - } + for worktree_handle in self.worktrees.iter_mut() { + match worktree_handle { + WorktreeHandle::Strong(_) => {} + WorktreeHandle::Weak(worktree) => { + if let Some(worktree) = worktree.upgrade() { + *worktree_handle = WorktreeHandle::Strong(worktree); } } } } + self.send_project_updates(cx); + } + + pub fn unshared(&mut self, cx: &mut ModelContext) { + self.retain_worktrees = false; + self.downstream_client.take(); + // When not shared, only retain the visible worktrees - else { - for worktree_handle in self.worktrees.iter_mut() { - if let WorktreeHandle::Strong(worktree) = worktree_handle { - let is_visible = worktree.update(cx, |worktree, _| { - worktree.stop_observing_updates(); - worktree.is_visible() - }); - if !is_visible { - *worktree_handle = WorktreeHandle::Weak(worktree.downgrade()); - } + for worktree_handle in self.worktrees.iter_mut() { + if let WorktreeHandle::Strong(worktree) = worktree_handle { + let is_visible = worktree.update(cx, |worktree, _| { + worktree.stop_observing_updates(); + worktree.is_visible() + }); + if !is_visible { + *worktree_handle = WorktreeHandle::Weak(worktree.downgrade()); } } } diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 4aab731e645934..10608b74f3593f 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -247,7 +247,8 @@ impl SshSession { let line_ix = start_ix + ix; let content = &stderr_buffer[start_ix..line_ix]; start_ix = line_ix + 1; 
- if let Ok(record) = serde_json::from_slice::(content) { + if let Ok(mut record) = serde_json::from_slice::(content) { + record.message = format!("(remote) {}", record.message); record.log(log::logger()) } else { eprintln!("(remote) {}", String::from_utf8_lossy(content)); @@ -469,6 +470,10 @@ impl ProtoClient for SshSession { fn message_handler_set(&self) -> &Mutex { &self.state } + + fn goes_via_collab(&self) -> bool { + false + } } impl SshClientState { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index bbd82281d86afc..54f48e36269baf 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -44,7 +44,11 @@ impl HeadlessProject { pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); - let worktree_store = cx.new_model(|_| WorktreeStore::new(true, fs.clone())); + let worktree_store = cx.new_model(|cx| { + let mut store = WorktreeStore::new(None, true, fs.clone()); + store.shared(SSH_PROJECT_ID, session.clone().into(), cx); + store + }); let buffer_store = cx.new_model(|cx| { let mut buffer_store = BufferStore::new(worktree_store.clone(), Some(SSH_PROJECT_ID), cx); @@ -196,18 +200,11 @@ impl HeadlessProject { .await?; this.update(&mut cx, |this, cx| { - let session = this.session.clone(); this.worktree_store.update(cx, |worktree_store, cx| { worktree_store.add(&worktree, cx); }); - worktree.update(cx, |worktree, cx| { - worktree.observe_updates(0, cx, move |update| { - session.send(update).ok(); - futures::future::ready(true) - }); - proto::AddWorktreeResponse { - worktree_id: worktree.id().to_proto(), - } + worktree.update(cx, |worktree, _| proto::AddWorktreeResponse { + worktree_id: worktree.id().to_proto(), }) }) } diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 4a990a84336dfd..89ef580cdfb3d7 100644 --- 
a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -27,6 +27,8 @@ pub trait ProtoClient: Send + Sync { fn send_response(&self, envelope: Envelope, message_type: &'static str) -> anyhow::Result<()>; fn message_handler_set(&self) -> &parking_lot::Mutex; + + fn goes_via_collab(&self) -> bool; } #[derive(Default)] @@ -139,6 +141,10 @@ impl AnyProtoClient { Self(client) } + pub fn goes_via_collab(&self) -> bool { + self.0.goes_via_collab() + } + pub fn request( &self, request: T, From 28a54ce122fdd5efb2e23cc77a5efab78c07061c Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Thu, 19 Sep 2024 14:16:01 -0600 Subject: [PATCH 227/762] Add diagnostic information to context of inline assistant (#18096) Release Notes: - Added Diagnostic information to inline assistant. This enables users to just say "Fix this" and have the model know what the errors are. --- assets/prompts/content_prompt.hbs | 11 +++++++++++ crates/assistant/src/prompts.rs | 23 +++++++++++++++++++++-- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/assets/prompts/content_prompt.hbs b/assets/prompts/content_prompt.hbs index cf4141349b356c..e944e230f56f9b 100644 --- a/assets/prompts/content_prompt.hbs +++ b/assets/prompts/content_prompt.hbs @@ -47,6 +47,17 @@ And here's the section to rewrite based on that prompt again for reference: {{{rewrite_section}}} + +{{#if diagnostic_errors}} +{{#each diagnostic_errors}} + + {{line_number}} + {{error_message}} + {{code_content}} + +{{/each}} +{{/if}} + {{/if}} Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved. 
diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index ae2ab4787e6f40..7d99a70d1419c9 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -4,13 +4,20 @@ use fs::Fs; use futures::StreamExt; use gpui::AssetSource; use handlebars::{Handlebars, RenderError}; -use language::{BufferSnapshot, LanguageName}; +use language::{BufferSnapshot, LanguageName, Point}; use parking_lot::Mutex; use serde::Serialize; use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration}; use text::LineEnding; use util::ResultExt; +#[derive(Serialize)] +pub struct ContentPromptDiagnosticContext { + pub line_number: usize, + pub error_message: String, + pub code_content: String, +} + #[derive(Serialize)] pub struct ContentPromptContext { pub content_type: String, @@ -20,6 +27,7 @@ pub struct ContentPromptContext { pub document_content: String, pub user_prompt: String, pub rewrite_section: Option, + pub diagnostic_errors: Vec, } #[derive(Serialize)] @@ -261,6 +269,17 @@ impl PromptBuilder { } else { None }; + let diagnostics = buffer.diagnostics_in_range::<_, Point>(range, false); + let diagnostic_errors: Vec = diagnostics + .map(|entry| { + let start = entry.range.start; + ContentPromptDiagnosticContext { + line_number: (start.row + 1) as usize, + error_message: entry.diagnostic.message.clone(), + code_content: buffer.text_for_range(entry.range.clone()).collect(), + } + }) + .collect(); let context = ContentPromptContext { content_type: content_type.to_string(), @@ -270,8 +289,8 @@ impl PromptBuilder { document_content, user_prompt, rewrite_section, + diagnostic_errors, }; - self.handlebars.lock().render("content_prompt", &context) } From 82e6b1e0e5fe4f2a04fba2fb6f3e7d1aae0974a1 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 19 Sep 2024 17:22:11 -0400 Subject: [PATCH 228/762] docs: Update glibc requirements for current binaries (#18101) --- docs/src/linux.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/docs/src/linux.md b/docs/src/linux.md index 812a3707d01b1b..17334c325ca309 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -16,7 +16,7 @@ The Zed installed by the script works best on systems that: - have a Vulkan compatible GPU available (for example Linux on an M-series macBook) - have a system-wide glibc (NixOS and Alpine do not by default) - - x86_64 (Intel/AMD): glibc version >= 2.29 (Ubuntu 20 and newer; Amazon Linux >2023) + - x86_64 (Intel/AMD): glibc version >= 2.35 (Ubuntu 22 and newer) - aarch64 (ARM): glibc version >= 2.35 (Ubuntu 22 and newer) Both Nix and Alpine have third-party Zed packages available (though they are currently a few weeks out of date). If you'd like to use our builds they do work if you install a glibc compatibility layer. On NixOS you can try [nix-ld](https://github.com/Mic92/nix-ld), and on Alpine [gcompat](https://wiki.alpinelinux.org/wiki/Running_glibc_programs). @@ -24,7 +24,7 @@ Both Nix and Alpine have third-party Zed packages available (though they are cur You will need to build from source for: - architectures other than 64-bit Intel or 64-bit ARM (for example a 32-bit or RISC-V machine) -- Amazon Linux 2 on x86_64 +- Amazon Linux - Rocky Linux 9.3 ## Other ways to install Zed on Linux From edf2c192500194192320ff21e86a2846e5089d48 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 19 Sep 2024 15:28:30 -0600 Subject: [PATCH 229/762] Hide GPU problems from Slack (#18087) Release Notes: - N/A --------- Co-authored-by: Marshall Co-authored-by: Marshall Bowers --- crates/collab/src/api/events.rs | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 1be8f9c37b4d09..008c76e048b9d9 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -18,8 +18,8 @@ use sha2::{Digest, Sha256}; use std::sync::{Arc, OnceLock}; use telemetry_events::{ ActionEvent, AppEvent, 
AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event, - EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent, - SettingEvent, + EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, Panic, + ReplEvent, SettingEvent, }; use uuid::Uuid; @@ -296,10 +296,11 @@ pub async fn post_panic( version = %panic.app_version, os_name = %panic.os_name, os_version = %panic.os_version.clone().unwrap_or_default(), - installation_id = %panic.installation_id.unwrap_or_default(), + installation_id = %panic.installation_id.clone().unwrap_or_default(), description = %panic.payload, backtrace = %panic.backtrace.join("\n"), - "panic report"); + "panic report" + ); let backtrace = if panic.backtrace.len() > 25 { let total = panic.backtrace.len(); @@ -317,6 +318,11 @@ pub async fn post_panic( } else { panic.backtrace.join("\n") }; + + if !report_to_slack(&panic) { + return Ok(()); + } + let backtrace_with_summary = panic.payload + "\n" + &backtrace; if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { @@ -357,6 +363,23 @@ pub async fn post_panic( Ok(()) } +fn report_to_slack(panic: &Panic) -> bool { + if panic.os_name == "Linux" { + if panic.payload.contains("ERROR_SURFACE_LOST_KHR") { + return false; + } + + if panic + .payload + .contains("GPU has crashed, and no debug information is available") + { + return false; + } + } + + true +} + pub async fn post_events( Extension(app): Extension>, TypedHeader(ZedChecksumHeader(checksum)): TypedHeader, From 740803d745e1fe3b711c3c1a05ce3a2616f123cb Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Thu, 19 Sep 2024 19:43:32 -0400 Subject: [PATCH 230/762] Bump release_notes to v2 endpoint (#18108) Partially addresses https://github.com/zed-industries/zed/issues/17527 SCR-20240919-rcik Release Notes: - Enhanced the `auto update: view release notes locally` feature to display release notes for each patch version associated with the installed minor version. --- crates/auto_update/src/auto_update.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index cfda6d6e584b91..1fe89cce0f9c4e 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -268,7 +268,7 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext Date: Thu, 19 Sep 2024 17:49:22 -0600 Subject: [PATCH 231/762] Fix prompt reloading in dev mode (#18095) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I think I nulled out the repo path to test the non dev mode case and then forgot to reenable it 🤦‍♂️ . Release Notes: - N/A --- crates/assistant/src/prompts.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 7d99a70d1419c9..3b9f75bac930b6 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -90,10 +90,9 @@ impl PromptBuilder { /// and application context. /// * `handlebars` - An `Arc>` for registering and updating templates. 
fn watch_fs_for_template_overrides( - mut params: PromptLoadingParams, + params: PromptLoadingParams, handlebars: Arc>>, ) { - params.repo_path = None; let templates_dir = paths::prompt_overrides_dir(params.repo_path.as_deref()); params.cx.background_executor() .spawn(async move { From 15b4130fa551e38841b69f135218ef75cfb010db Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 19 Sep 2024 17:50:00 -0600 Subject: [PATCH 232/762] Introduce the ability to cycle between alternative inline assists (#18098) Release Notes: - Added a new `assistant.inline_alternatives` setting to configure additional models that will be used to perform inline assists in parallel. --------- Co-authored-by: Nathan Co-authored-by: Roy Co-authored-by: Adam --- assets/keymaps/default-linux.json | 7 + assets/keymaps/default-macos.json | 7 + crates/assistant/src/assistant.rs | 13 + crates/assistant/src/assistant_settings.rs | 13 +- crates/assistant/src/inline_assistant.rs | 700 ++++++++++++++++----- crates/language_model/src/registry.rs | 32 + crates/multi_buffer/src/multi_buffer.rs | 20 + docs/src/assistant/configuration.md | 26 + 8 files changed, 641 insertions(+), 177 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 542f6c2df42b48..f15c4dfe22b6c0 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -520,6 +520,13 @@ "alt-enter": "editor::Newline" } }, + { + "context": "PromptEditor", + "bindings": { + "ctrl-[": "assistant::CyclePreviousInlineAssist", + "ctrl-]": "assistant::CycleNextInlineAssist" + } + }, { "context": "ProjectSearchBar && !in_replace", "bindings": { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 77fac3254bec17..a58112b3c0b927 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -527,6 +527,13 @@ "ctrl-enter": "assistant::InlineAssist" } }, + { + "context": "PromptEditor", + "bindings": { + "ctrl-[": 
"assistant::CyclePreviousInlineAssist", + "ctrl-]": "assistant::CycleNextInlineAssist" + } + }, { "context": "ProjectSearchBar && !in_replace", "bindings": { diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index d7466878c9ce1e..8b9c66ee55848d 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -69,6 +69,8 @@ actions!( ConfirmCommand, NewContext, ToggleModelSelector, + CycleNextInlineAssist, + CyclePreviousInlineAssist ] ); @@ -359,8 +361,19 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) { let settings = AssistantSettings::get_global(cx); let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone()); let model_id = LanguageModelId::from(settings.default_model.model.clone()); + let inline_alternatives = settings + .inline_alternatives + .iter() + .map(|alternative| { + ( + LanguageModelProviderId::from(alternative.provider.clone()), + LanguageModelId::from(alternative.model.clone()), + ) + }) + .collect::>(); LanguageModelRegistry::global(cx).update(cx, |registry, cx| { registry.select_active_model(&provider_name, &model_id, cx); + registry.select_inline_alternative_models(inline_alternatives, cx); }); } diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index e2c6a8eb24f088..5aa379bae3fce9 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -59,6 +59,7 @@ pub struct AssistantSettings { pub default_width: Pixels, pub default_height: Pixels, pub default_model: LanguageModelSelection, + pub inline_alternatives: Vec, pub using_outdated_settings_version: bool, } @@ -236,6 +237,7 @@ impl AssistantSettingsContent { }) } }), + inline_alternatives: None, }, VersionedAssistantSettingsContent::V2(settings) => settings.clone(), }, @@ -254,6 +256,7 @@ impl AssistantSettingsContent { .id() .to_string(), }), + inline_alternatives: None, }, } } @@ -369,6 
+372,7 @@ impl Default for VersionedAssistantSettingsContent { default_width: None, default_height: None, default_model: None, + inline_alternatives: None, }) } } @@ -397,6 +401,8 @@ pub struct AssistantSettingsContentV2 { default_height: Option, /// The default model to use when creating new contexts. default_model: Option, + /// Additional models with which to generate alternatives when performing inline assists. + inline_alternatives: Option>, } #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] @@ -517,10 +523,8 @@ impl Settings for AssistantSettings { &mut settings.default_height, value.default_height.map(Into::into), ); - merge( - &mut settings.default_model, - value.default_model.map(Into::into), - ); + merge(&mut settings.default_model, value.default_model); + merge(&mut settings.inline_alternatives, value.inline_alternatives); // merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference } @@ -574,6 +578,7 @@ mod tests { provider: "test-provider".into(), model: "gpt-99".into(), }), + inline_alternatives: None, enabled: None, button: None, dock: None, diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index c9360213ae5138..428b33f3bbd92c 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1,7 +1,7 @@ use crate::{ assistant_settings::AssistantSettings, humanize_token_count, prompts::PromptBuilder, - AssistantPanel, AssistantPanelEvent, CharOperation, LineDiff, LineOperation, ModelSelector, - StreamingDiff, + AssistantPanel, AssistantPanelEvent, CharOperation, CycleNextInlineAssist, + CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, StreamingDiff, }; use anyhow::{anyhow, Context as _, Result}; use client::{telemetry::Telemetry, ErrorExt}; @@ -25,13 +25,13 @@ use futures::{ SinkExt, Stream, StreamExt, }; use gpui::{ - anchored, deferred, point, AppContext, ClickEvent, 
EventEmitter, FocusHandle, FocusableView, - FontWeight, Global, HighlightStyle, Model, ModelContext, Subscription, Task, TextStyle, - UpdateGlobal, View, ViewContext, WeakView, WindowContext, + anchored, deferred, point, AnyElement, AppContext, ClickEvent, EventEmitter, FocusHandle, + FocusableView, FontWeight, Global, HighlightStyle, Model, ModelContext, Subscription, Task, + TextStyle, UpdateGlobal, View, ViewContext, WeakView, WindowContext, }; use language::{Buffer, IndentKind, Point, Selection, TransactionId}; use language_model::{ - LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, + LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, }; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; @@ -41,7 +41,7 @@ use smol::future::FutureExt; use std::{ cmp, future::{self, Future}, - mem, + iter, mem, ops::{Range, RangeInclusive}, pin::Pin, sync::Arc, @@ -85,7 +85,7 @@ pub struct InlineAssistant { async_watch::Receiver, ), >, - confirmed_assists: HashMap>, + confirmed_assists: HashMap>, prompt_history: VecDeque, prompt_builder: Arc, telemetry: Option>, @@ -157,7 +157,7 @@ impl InlineAssistant { if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { for assist_id in editor_assists.assist_ids.clone() { let assist = &self.assists[&assist_id]; - if let CodegenStatus::Done = &assist.codegen.read(cx).status { + if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { self.finish_assist(assist_id, false, cx) } } @@ -553,7 +553,7 @@ impl InlineAssistant { let assist_range = assist.range.to_offset(&buffer); if assist_range.contains(&selection.start) && assist_range.contains(&selection.end) { - if matches!(assist.codegen.read(cx).status, CodegenStatus::Pending) { + if matches!(assist.codegen.read(cx).status(cx), CodegenStatus::Pending) { self.dismiss_assist(*assist_id, cx); } else { self.finish_assist(*assist_id, false, cx); @@ -671,7 +671,7 @@ impl InlineAssistant 
{ for assist_id in editor_assists.assist_ids.clone() { let assist = &self.assists[&assist_id]; if matches!( - assist.codegen.read(cx).status, + assist.codegen.read(cx).status(cx), CodegenStatus::Error(_) | CodegenStatus::Done ) { let assist_range = assist.range.to_offset(&snapshot); @@ -774,7 +774,9 @@ impl InlineAssistant { if undo { assist.codegen.update(cx, |codegen, cx| codegen.undo(cx)); } else { - self.confirmed_assists.insert(assist_id, assist.codegen); + let confirmed_alternative = assist.codegen.read(cx).active_alternative().clone(); + self.confirmed_assists + .insert(assist_id, confirmed_alternative); } } @@ -978,12 +980,7 @@ impl InlineAssistant { assist .codegen .update(cx, |codegen, cx| { - codegen.start( - assist.range.clone(), - user_prompt, - assistant_panel_context, - cx, - ) + codegen.start(user_prompt, assistant_panel_context, cx) }) .log_err(); @@ -1008,7 +1005,7 @@ impl InlineAssistant { pub fn assist_status(&self, assist_id: InlineAssistId, cx: &AppContext) -> InlineAssistStatus { if let Some(assist) = self.assists.get(&assist_id) { - match &assist.codegen.read(cx).status { + match assist.codegen.read(cx).status(cx) { CodegenStatus::Idle => InlineAssistStatus::Idle, CodegenStatus::Pending => InlineAssistStatus::Pending, CodegenStatus::Done => InlineAssistStatus::Done, @@ -1037,16 +1034,16 @@ impl InlineAssistant { for assist_id in assist_ids { if let Some(assist) = self.assists.get(assist_id) { let codegen = assist.codegen.read(cx); - let buffer = codegen.buffer.read(cx).read(cx); - foreground_ranges.extend(codegen.last_equal_ranges().iter().cloned()); + let buffer = codegen.buffer(cx).read(cx).read(cx); + foreground_ranges.extend(codegen.last_equal_ranges(cx).iter().cloned()); let pending_range = - codegen.edit_position.unwrap_or(assist.range.start)..assist.range.end; + codegen.edit_position(cx).unwrap_or(assist.range.start)..assist.range.end; if pending_range.end.to_offset(&buffer) > pending_range.start.to_offset(&buffer) { 
gutter_pending_ranges.push(pending_range); } - if let Some(edit_position) = codegen.edit_position { + if let Some(edit_position) = codegen.edit_position(cx) { let edited_range = assist.range.start..edit_position; if edited_range.end.to_offset(&buffer) > edited_range.start.to_offset(&buffer) { gutter_transformed_ranges.push(edited_range); @@ -1054,7 +1051,8 @@ impl InlineAssistant { } if assist.decorations.is_some() { - inserted_row_ranges.extend(codegen.diff.inserted_row_ranges.iter().cloned()); + inserted_row_ranges + .extend(codegen.diff(cx).inserted_row_ranges.iter().cloned()); } } } @@ -1125,9 +1123,9 @@ impl InlineAssistant { }; let codegen = assist.codegen.read(cx); - let old_snapshot = codegen.snapshot.clone(); - let old_buffer = codegen.old_buffer.clone(); - let deleted_row_ranges = codegen.diff.deleted_row_ranges.clone(); + let old_snapshot = codegen.snapshot(cx); + let old_buffer = codegen.old_buffer(cx); + let deleted_row_ranges = codegen.diff(cx).deleted_row_ranges.clone(); editor.update(cx, |editor, cx| { let old_blocks = mem::take(&mut decorations.removed_line_block_ids); @@ -1406,8 +1404,15 @@ impl EventEmitter for PromptEditor {} impl Render for PromptEditor { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let gutter_dimensions = *self.gutter_dimensions.lock(); - let status = &self.codegen.read(cx).status; - let buttons = match status { + let codegen = self.codegen.read(cx); + + let mut buttons = Vec::new(); + if codegen.alternative_count(cx) > 1 { + buttons.push(self.render_cycle_controls(cx)); + } + + let status = codegen.status(cx); + buttons.extend(match status { CodegenStatus::Idle => { vec![ IconButton::new("cancel", IconName::Close) @@ -1416,14 +1421,16 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), IconButton::new("start", IconName::SparkleAlt) 
.icon_color(Color::Muted) .shape(IconButtonShape::Square) .tooltip(|cx| Tooltip::for_action("Transform", &menu::Confirm, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StartRequested)), - ), + ) + .into_any_element(), ] } CodegenStatus::Pending => { @@ -1434,7 +1441,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::text("Cancel Assist", cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), IconButton::new("stop", IconName::Stop) .icon_color(Color::Error) .shape(IconButtonShape::Square) @@ -1446,9 +1454,8 @@ impl Render for PromptEditor { cx, ) }) - .on_click( - cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StopRequested)), - ), + .on_click(cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StopRequested))) + .into_any_element(), ] } CodegenStatus::Error(_) | CodegenStatus::Done => { @@ -1459,7 +1466,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), if self.edited_since_done || matches!(status, CodegenStatus::Error(_)) { IconButton::new("restart", IconName::RotateCw) .icon_color(Color::Info) @@ -1475,6 +1483,7 @@ impl Render for PromptEditor { .on_click(cx.listener(|_, _, cx| { cx.emit(PromptEditorEvent::StartRequested); })) + .into_any_element() } else { IconButton::new("confirm", IconName::Check) .icon_color(Color::Info) @@ -1483,12 +1492,14 @@ impl Render for PromptEditor { .on_click(cx.listener(|_, _, cx| { cx.emit(PromptEditorEvent::ConfirmRequested); })) + .into_any_element() }, ] } - }; + }); h_flex() + .key_context("PromptEditor") .bg(cx.theme().colors().editor_background) .border_y_1() .border_color(cx.theme().status().info_border) @@ -1498,6 +1509,8 @@ impl Render for PromptEditor { .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::move_up)) .on_action(cx.listener(Self::move_down)) 
+ .capture_action(cx.listener(Self::cycle_prev)) + .capture_action(cx.listener(Self::cycle_next)) .child( h_flex() .w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0)) @@ -1532,7 +1545,7 @@ impl Render for PromptEditor { ), ) .map(|el| { - let CodegenStatus::Error(error) = &self.codegen.read(cx).status else { + let CodegenStatus::Error(error) = self.codegen.read(cx).status(cx) else { return el; }; @@ -1776,7 +1789,7 @@ impl PromptEditor { } fn handle_codegen_changed(&mut self, _: Model, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle => { self.editor .update(cx, |editor, _| editor.set_read_only(false)); @@ -1807,7 +1820,7 @@ impl PromptEditor { } fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle | CodegenStatus::Done | CodegenStatus::Error(_) => { cx.emit(PromptEditorEvent::CancelRequested); } @@ -1818,7 +1831,7 @@ impl PromptEditor { } fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle => { cx.emit(PromptEditorEvent::StartRequested); } @@ -1878,6 +1891,79 @@ impl PromptEditor { } } + fn cycle_prev(&mut self, _: &CyclePreviousInlineAssist, cx: &mut ViewContext) { + self.codegen + .update(cx, |codegen, cx| codegen.cycle_prev(cx)); + } + + fn cycle_next(&mut self, _: &CycleNextInlineAssist, cx: &mut ViewContext) { + self.codegen + .update(cx, |codegen, cx| codegen.cycle_next(cx)); + } + + fn render_cycle_controls(&self, cx: &ViewContext) -> AnyElement { + let codegen = self.codegen.read(cx); + let disabled = matches!(codegen.status(cx), CodegenStatus::Idle); + + h_flex() + .child( + IconButton::new("previous", IconName::ChevronLeft) + .icon_color(Color::Muted) + .disabled(disabled) + .shape(IconButtonShape::Square) + .tooltip({ + let 
focus_handle = self.editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Previous Alternative", + &CyclePreviousInlineAssist, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(|this, _, cx| { + this.codegen + .update(cx, |codegen, cx| codegen.cycle_prev(cx)) + })), + ) + .child( + Label::new(format!( + "{}/{}", + codegen.active_alternative + 1, + codegen.alternative_count(cx) + )) + .size(LabelSize::Small) + .color(if disabled { + Color::Disabled + } else { + Color::Muted + }), + ) + .child( + IconButton::new("next", IconName::ChevronRight) + .icon_color(Color::Muted) + .disabled(disabled) + .shape(IconButtonShape::Square) + .tooltip({ + let focus_handle = self.editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Next Alternative", + &CycleNextInlineAssist, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(|this, _, cx| { + this.codegen + .update(cx, |codegen, cx| codegen.cycle_next(cx)) + })), + ) + .into_any_element() + } + fn render_token_count(&self, cx: &mut ViewContext) -> Option { let model = LanguageModelRegistry::read_global(cx).active_model()?; let token_counts = self.token_counts?; @@ -2124,7 +2210,7 @@ impl InlineAssist { return; }; - if let CodegenStatus::Error(error) = &codegen.read(cx).status { + if let CodegenStatus::Error(error) = codegen.read(cx).status(cx) { if assist.decorations.is_none() { if let Some(workspace) = assist .workspace @@ -2185,12 +2271,9 @@ impl InlineAssist { return future::ready(Err(anyhow!("no user prompt"))).boxed(); }; let assistant_panel_context = self.assistant_panel_context(cx); - self.codegen.read(cx).count_tokens( - self.range.clone(), - user_prompt, - assistant_panel_context, - cx, - ) + self.codegen + .read(cx) + .count_tokens(user_prompt, assistant_panel_context, cx) } } @@ -2201,19 +2284,216 @@ struct InlineAssistDecorations { end_block_id: CustomBlockId, } -#[derive(Debug)] +#[derive(Copy, Clone, Debug)] pub enum CodegenEvent { Finished, Undone, } pub struct Codegen 
{ + alternatives: Vec>, + active_alternative: usize, + subscriptions: Vec, + buffer: Model, + range: Range, + initial_transaction_id: Option, + telemetry: Option>, + builder: Arc, +} + +impl Codegen { + pub fn new( + buffer: Model, + range: Range, + initial_transaction_id: Option, + telemetry: Option>, + builder: Arc, + cx: &mut ModelContext, + ) -> Self { + let codegen = cx.new_model(|cx| { + CodegenAlternative::new( + buffer.clone(), + range.clone(), + false, + telemetry.clone(), + builder.clone(), + cx, + ) + }); + let mut this = Self { + alternatives: vec![codegen], + active_alternative: 0, + subscriptions: Vec::new(), + buffer, + range, + initial_transaction_id, + telemetry, + builder, + }; + this.activate(0, cx); + this + } + + fn subscribe_to_alternative(&mut self, cx: &mut ModelContext) { + let codegen = self.active_alternative().clone(); + self.subscriptions.clear(); + self.subscriptions + .push(cx.observe(&codegen, |_, _, cx| cx.notify())); + self.subscriptions + .push(cx.subscribe(&codegen, |_, _, event, cx| cx.emit(*event))); + } + + fn active_alternative(&self) -> &Model { + &self.alternatives[self.active_alternative] + } + + fn status<'a>(&self, cx: &'a AppContext) -> &'a CodegenStatus { + &self.active_alternative().read(cx).status + } + + fn alternative_count(&self, cx: &AppContext) -> usize { + LanguageModelRegistry::read_global(cx) + .inline_alternative_models() + .len() + + 1 + } + + pub fn cycle_prev(&mut self, cx: &mut ModelContext) { + let next_active_ix = if self.active_alternative == 0 { + self.alternatives.len() - 1 + } else { + self.active_alternative - 1 + }; + self.activate(next_active_ix, cx); + } + + pub fn cycle_next(&mut self, cx: &mut ModelContext) { + let next_active_ix = (self.active_alternative + 1) % self.alternatives.len(); + self.activate(next_active_ix, cx); + } + + fn activate(&mut self, index: usize, cx: &mut ModelContext) { + self.active_alternative() + .update(cx, |codegen, cx| codegen.set_active(false, cx)); + 
self.active_alternative = index; + self.active_alternative() + .update(cx, |codegen, cx| codegen.set_active(true, cx)); + self.subscribe_to_alternative(cx); + cx.notify(); + } + + pub fn start( + &mut self, + user_prompt: String, + assistant_panel_context: Option, + cx: &mut ModelContext, + ) -> Result<()> { + let alternative_models = LanguageModelRegistry::read_global(cx) + .inline_alternative_models() + .to_vec(); + + self.active_alternative() + .update(cx, |alternative, cx| alternative.undo(cx)); + self.activate(0, cx); + self.alternatives.truncate(1); + + for _ in 0..alternative_models.len() { + self.alternatives.push(cx.new_model(|cx| { + CodegenAlternative::new( + self.buffer.clone(), + self.range.clone(), + false, + self.telemetry.clone(), + self.builder.clone(), + cx, + ) + })); + } + + let primary_model = LanguageModelRegistry::read_global(cx) + .active_model() + .context("no active model")?; + + for (model, alternative) in iter::once(primary_model) + .chain(alternative_models) + .zip(&self.alternatives) + { + alternative.update(cx, |alternative, cx| { + alternative.start( + user_prompt.clone(), + assistant_panel_context.clone(), + model.clone(), + cx, + ) + })?; + } + + Ok(()) + } + + pub fn stop(&mut self, cx: &mut ModelContext) { + for codegen in &self.alternatives { + codegen.update(cx, |codegen, cx| codegen.stop(cx)); + } + } + + pub fn undo(&mut self, cx: &mut ModelContext) { + self.active_alternative() + .update(cx, |codegen, cx| codegen.undo(cx)); + + self.buffer.update(cx, |buffer, cx| { + if let Some(transaction_id) = self.initial_transaction_id.take() { + buffer.undo_transaction(transaction_id, cx); + buffer.refresh_preview(cx); + } + }); + } + + pub fn count_tokens( + &self, + user_prompt: String, + assistant_panel_context: Option, + cx: &AppContext, + ) -> BoxFuture<'static, Result> { + self.active_alternative() + .read(cx) + .count_tokens(user_prompt, assistant_panel_context, cx) + } + + pub fn buffer(&self, cx: &AppContext) -> Model { + 
self.active_alternative().read(cx).buffer.clone() + } + + pub fn old_buffer(&self, cx: &AppContext) -> Model { + self.active_alternative().read(cx).old_buffer.clone() + } + + pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { + self.active_alternative().read(cx).snapshot.clone() + } + + pub fn edit_position(&self, cx: &AppContext) -> Option { + self.active_alternative().read(cx).edit_position + } + + fn diff<'a>(&self, cx: &'a AppContext) -> &'a Diff { + &self.active_alternative().read(cx).diff + } + + pub fn last_equal_ranges<'a>(&self, cx: &'a AppContext) -> &'a [Range] { + self.active_alternative().read(cx).last_equal_ranges() + } +} + +impl EventEmitter for Codegen {} + +pub struct CodegenAlternative { buffer: Model, old_buffer: Model, snapshot: MultiBufferSnapshot, edit_position: Option, + range: Range, last_equal_ranges: Vec>, - initial_transaction_id: Option, transformation_transaction_id: Option, status: CodegenStatus, generation: Task<()>, @@ -2221,6 +2501,9 @@ pub struct Codegen { telemetry: Option>, _subscription: gpui::Subscription, builder: Arc, + active: bool, + edits: Vec<(Range, String)>, + line_operations: Vec, } enum CodegenStatus { @@ -2242,13 +2525,13 @@ impl Diff { } } -impl EventEmitter for Codegen {} +impl EventEmitter for CodegenAlternative {} -impl Codegen { +impl CodegenAlternative { pub fn new( buffer: Model, range: Range, - initial_transaction_id: Option, + active: bool, telemetry: Option>, builder: Arc, cx: &mut ModelContext, @@ -2287,8 +2570,33 @@ impl Codegen { diff: Diff::default(), telemetry, _subscription: cx.subscribe(&buffer, Self::handle_buffer_event), - initial_transaction_id, builder, + active, + edits: Vec::new(), + line_operations: Vec::new(), + range, + } + } + + fn set_active(&mut self, active: bool, cx: &mut ModelContext) { + if active != self.active { + self.active = active; + + if self.active { + let edits = self.edits.clone(); + self.apply_edits(edits, cx); + if matches!(self.status, 
CodegenStatus::Pending) { + let line_operations = self.line_operations.clone(); + self.reapply_line_based_diff(line_operations, cx); + } else { + self.reapply_batch_diff(cx).detach(); + } + } else if let Some(transaction_id) = self.transformation_transaction_id.take() { + self.buffer.update(cx, |buffer, cx| { + buffer.undo_transaction(transaction_id, cx); + buffer.forget_transaction(transaction_id, cx); + }); + } } } @@ -2313,14 +2621,12 @@ impl Codegen { pub fn count_tokens( &self, - edit_range: Range, user_prompt: String, assistant_panel_context: Option, cx: &AppContext, ) -> BoxFuture<'static, Result> { if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { - let request = - self.build_request(user_prompt, assistant_panel_context.clone(), edit_range, cx); + let request = self.build_request(user_prompt, assistant_panel_context.clone(), cx); match request { Ok(request) => { let total_count = model.count_tokens(request.clone(), cx); @@ -2345,39 +2651,31 @@ impl Codegen { pub fn start( &mut self, - edit_range: Range, user_prompt: String, assistant_panel_context: Option, + model: Arc, cx: &mut ModelContext, ) -> Result<()> { - let model = LanguageModelRegistry::read_global(cx) - .active_model() - .context("no active model")?; - if let Some(transformation_transaction_id) = self.transformation_transaction_id.take() { self.buffer.update(cx, |buffer, cx| { buffer.undo_transaction(transformation_transaction_id, cx); }); } - self.edit_position = Some(edit_range.start.bias_right(&self.snapshot)); + self.edit_position = Some(self.range.start.bias_right(&self.snapshot)); let telemetry_id = model.telemetry_id(); - let chunks: LocalBoxFuture>>> = if user_prompt - .trim() - .to_lowercase() - == "delete" - { - async { Ok(stream::empty().boxed()) }.boxed_local() - } else { - let request = - self.build_request(user_prompt, assistant_panel_context, edit_range.clone(), cx)?; + let chunks: LocalBoxFuture>>> = + if user_prompt.trim().to_lowercase() == "delete" { + 
async { Ok(stream::empty().boxed()) }.boxed_local() + } else { + let request = self.build_request(user_prompt, assistant_panel_context, cx)?; - let chunks = - cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await }); - async move { Ok(chunks.await?.boxed()) }.boxed_local() - }; - self.handle_stream(telemetry_id, edit_range, chunks, cx); + let chunks = cx + .spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await }); + async move { Ok(chunks.await?.boxed()) }.boxed_local() + }; + self.handle_stream(telemetry_id, chunks, cx); Ok(()) } @@ -2385,11 +2683,10 @@ impl Codegen { &self, user_prompt: String, assistant_panel_context: Option, - edit_range: Range, cx: &AppContext, ) -> Result { let buffer = self.buffer.read(cx).snapshot(cx); - let language = buffer.language_at(edit_range.start); + let language = buffer.language_at(self.range.start); let language_name = if let Some(language) = language.as_ref() { if Arc::ptr_eq(language, &language::PLAIN_TEXT) { None @@ -2401,8 +2698,8 @@ impl Codegen { }; let language_name = language_name.as_ref(); - let start = buffer.point_to_buffer_offset(edit_range.start); - let end = buffer.point_to_buffer_offset(edit_range.end); + let start = buffer.point_to_buffer_offset(self.range.start); + let end = buffer.point_to_buffer_offset(self.range.end); let (buffer, range) = if let Some((start, end)) = start.zip(end) { let (start_buffer, start_buffer_offset) = start; let (end_buffer, end_buffer_offset) = end; @@ -2442,16 +2739,15 @@ impl Codegen { pub fn handle_stream( &mut self, model_telemetry_id: String, - edit_range: Range, stream: impl 'static + Future>>>, cx: &mut ModelContext, ) { let snapshot = self.snapshot.clone(); let selected_text = snapshot - .text_for_range(edit_range.start..edit_range.end) + .text_for_range(self.range.start..self.range.end) .collect::(); - let selection_start = edit_range.start.to_point(&snapshot); + let selection_start = self.range.start.to_point(&snapshot); // Start 
with the indentation of the first line in the selection let mut suggested_line_indent = snapshot @@ -2462,7 +2758,7 @@ impl Codegen { // If the first line in the selection does not have indentation, check the following lines if suggested_line_indent.len == 0 && suggested_line_indent.kind == IndentKind::Space { - for row in selection_start.row..=edit_range.end.to_point(&snapshot).row { + for row in selection_start.row..=self.range.end.to_point(&snapshot).row { let line_indent = snapshot.indent_size_for_line(MultiBufferRow(row)); // Prefer tabs if a line in the selection uses tabs as indentation if line_indent.kind == IndentKind::Tab { @@ -2475,7 +2771,7 @@ impl Codegen { let telemetry = self.telemetry.clone(); self.diff = Diff::default(); self.status = CodegenStatus::Pending; - let mut edit_start = edit_range.start.to_offset(&snapshot); + let mut edit_start = self.range.start.to_offset(&snapshot); self.generation = cx.spawn(|codegen, mut cx| { async move { let chunks = stream.await; @@ -2597,68 +2893,42 @@ impl Codegen { Ok(()) }); - while let Some((char_ops, line_diff)) = diff_rx.next().await { + while let Some((char_ops, line_ops)) = diff_rx.next().await { codegen.update(&mut cx, |codegen, cx| { codegen.last_equal_ranges.clear(); - let transaction = codegen.buffer.update(cx, |buffer, cx| { - // Avoid grouping assistant edits with user edits. 
- buffer.finalize_last_transaction(cx); - - buffer.start_transaction(cx); - buffer.edit( - char_ops - .into_iter() - .filter_map(|operation| match operation { - CharOperation::Insert { text } => { - let edit_start = snapshot.anchor_after(edit_start); - Some((edit_start..edit_start, text)) - } - CharOperation::Delete { bytes } => { - let edit_end = edit_start + bytes; - let edit_range = snapshot.anchor_after(edit_start) - ..snapshot.anchor_before(edit_end); - edit_start = edit_end; - Some((edit_range, String::new())) - } - CharOperation::Keep { bytes } => { - let edit_end = edit_start + bytes; - let edit_range = snapshot.anchor_after(edit_start) - ..snapshot.anchor_before(edit_end); - edit_start = edit_end; - codegen.last_equal_ranges.push(edit_range); - None - } - }), - None, - cx, - ); - codegen.edit_position = Some(snapshot.anchor_after(edit_start)); - - buffer.end_transaction(cx) - }); + let edits = char_ops + .into_iter() + .filter_map(|operation| match operation { + CharOperation::Insert { text } => { + let edit_start = snapshot.anchor_after(edit_start); + Some((edit_start..edit_start, text)) + } + CharOperation::Delete { bytes } => { + let edit_end = edit_start + bytes; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + Some((edit_range, String::new())) + } + CharOperation::Keep { bytes } => { + let edit_end = edit_start + bytes; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + codegen.last_equal_ranges.push(edit_range); + None + } + }) + .collect::>(); - if let Some(transaction) = transaction { - if let Some(first_transaction) = - codegen.transformation_transaction_id - { - // Group all assistant edits into the first transaction. 
- codegen.buffer.update(cx, |buffer, cx| { - buffer.merge_transactions( - transaction, - first_transaction, - cx, - ) - }); - } else { - codegen.transformation_transaction_id = Some(transaction); - codegen.buffer.update(cx, |buffer, cx| { - buffer.finalize_last_transaction(cx) - }); - } + if codegen.active { + codegen.apply_edits(edits.iter().cloned(), cx); + codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx); } - - codegen.reapply_line_based_diff(edit_range.clone(), line_diff, cx); + codegen.edits.extend(edits); + codegen.line_operations = line_ops; + codegen.edit_position = Some(snapshot.anchor_after(edit_start)); cx.notify(); })?; @@ -2667,9 +2937,8 @@ impl Codegen { // Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer. // That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff. // It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`. - let batch_diff_task = codegen.update(&mut cx, |codegen, cx| { - codegen.reapply_batch_diff(edit_range.clone(), cx) - })?; + let batch_diff_task = + codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?; let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task); line_based_stream_diff?; @@ -2713,24 +2982,45 @@ impl Codegen { buffer.undo_transaction(transaction_id, cx); buffer.refresh_preview(cx); } + }); + } - if let Some(transaction_id) = self.initial_transaction_id.take() { - buffer.undo_transaction(transaction_id, cx); - buffer.refresh_preview(cx); - } + fn apply_edits( + &mut self, + edits: impl IntoIterator, String)>, + cx: &mut ModelContext, + ) { + let transaction = self.buffer.update(cx, |buffer, cx| { + // Avoid grouping assistant edits with user edits. 
+ buffer.finalize_last_transaction(cx); + buffer.start_transaction(cx); + buffer.edit(edits, None, cx); + buffer.end_transaction(cx) }); + + if let Some(transaction) = transaction { + if let Some(first_transaction) = self.transformation_transaction_id { + // Group all assistant edits into the first transaction. + self.buffer.update(cx, |buffer, cx| { + buffer.merge_transactions(transaction, first_transaction, cx) + }); + } else { + self.transformation_transaction_id = Some(transaction); + self.buffer + .update(cx, |buffer, cx| buffer.finalize_last_transaction(cx)); + } + } } fn reapply_line_based_diff( &mut self, - edit_range: Range, - line_operations: Vec, + line_operations: impl IntoIterator, cx: &mut ModelContext, ) { let old_snapshot = self.snapshot.clone(); - let old_range = edit_range.to_point(&old_snapshot); + let old_range = self.range.to_point(&old_snapshot); let new_snapshot = self.buffer.read(cx).snapshot(cx); - let new_range = edit_range.to_point(&new_snapshot); + let new_range = self.range.to_point(&new_snapshot); let mut old_row = old_range.start.row; let mut new_row = new_range.start.row; @@ -2781,15 +3071,11 @@ impl Codegen { } } - fn reapply_batch_diff( - &mut self, - edit_range: Range, - cx: &mut ModelContext, - ) -> Task<()> { + fn reapply_batch_diff(&mut self, cx: &mut ModelContext) -> Task<()> { let old_snapshot = self.snapshot.clone(); - let old_range = edit_range.to_point(&old_snapshot); + let old_range = self.range.to_point(&old_snapshot); let new_snapshot = self.buffer.read(cx).snapshot(cx); - let new_range = edit_range.to_point(&new_snapshot); + let new_range = self.range.to_point(&new_snapshot); cx.spawn(|codegen, mut cx| async move { let (deleted_row_ranges, inserted_row_ranges) = cx @@ -3073,10 +3359,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ 
-3087,7 +3373,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range, future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3145,10 +3430,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3159,7 +3444,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3220,10 +3504,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3234,7 +3518,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3294,10 +3577,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3308,7 +3591,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3338,6 +3620,78 @@ mod tests { ); } + #[gpui::test] + async fn test_inactive_codegen_alternative(cx: &mut TestAppContext) { + cx.update(LanguageModelRegistry::test); + cx.set_global(cx.update(SettingsStore::test)); + cx.update(language_settings::init); + + let text = indoc! 
{" + fn main() { + let x = 0; + } + "}; + let buffer = + cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let range = buffer.read_with(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 14)) + }); + let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); + let codegen = cx.new_model(|cx| { + CodegenAlternative::new( + buffer.clone(), + range.clone(), + false, + None, + prompt_builder, + cx, + ) + }); + + let (chunks_tx, chunks_rx) = mpsc::unbounded(); + codegen.update(cx, |codegen, cx| { + codegen.handle_stream( + String::new(), + future::ready(Ok(chunks_rx.map(Ok).boxed())), + cx, + ) + }); + + chunks_tx + .unbounded_send("let mut x = 0;\nx += 1;".to_string()) + .unwrap(); + drop(chunks_tx); + cx.run_until_parked(); + + // The codegen is inactive, so the buffer doesn't get modified. + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + text + ); + + // Activating the codegen applies the changes. + codegen.update(cx, |codegen, cx| codegen.set_active(true, cx)); + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + indoc! {" + fn main() { + let mut x = 0; + x += 1; + } + "} + ); + + // Deactivating the codegen undoes the changes. 
+ codegen.update(cx, |codegen, cx| codegen.set_active(false, cx)); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + text + ); + } + #[gpui::test] async fn test_strip_invalid_spans_from_codeblock() { assert_chunks("Lorem ipsum dolor", "Lorem ipsum dolor").await; diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index b3c8ef5f57cc6b..e1ba1c588695c6 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -76,6 +76,7 @@ impl Global for GlobalLanguageModelRegistry {} pub struct LanguageModelRegistry { active_model: Option, providers: BTreeMap>, + inline_alternatives: Vec>, } pub struct ActiveModel { @@ -229,6 +230,37 @@ impl LanguageModelRegistry { pub fn active_model(&self) -> Option> { self.active_model.as_ref()?.model.clone() } + + /// Selects and sets the inline alternatives for language models based on + /// provider name and id. + pub fn select_inline_alternative_models( + &mut self, + alternatives: impl IntoIterator, + cx: &mut ModelContext, + ) { + let mut selected_alternatives = Vec::new(); + + for (provider_id, model_id) in alternatives { + if let Some(provider) = self.providers.get(&provider_id) { + if let Some(model) = provider + .provided_models(cx) + .iter() + .find(|m| m.id() == model_id) + { + selected_alternatives.push(model.clone()); + } + } + } + + self.inline_alternatives = selected_alternatives; + } + + /// The models to use for inline assists. Returns the union of the active + /// model and all inline alternatives. When there are multiple models, the + /// user will be able to cycle through results. 
+ pub fn inline_alternative_models(&self) -> &[Arc] { + &self.inline_alternatives + } } #[cfg(test)] diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 29bd9a80682a10..c163dbc07a6407 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1106,6 +1106,26 @@ impl MultiBuffer { } } + pub fn forget_transaction( + &mut self, + transaction_id: TransactionId, + cx: &mut ModelContext, + ) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, _| { + buffer.forget_transaction(transaction_id); + }); + } else if let Some(transaction) = self.history.forget(transaction_id) { + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { + if let Some(state) = self.buffers.borrow_mut().get_mut(&buffer_id) { + state.buffer.update(cx, |buffer, _| { + buffer.forget_transaction(buffer_transaction_id); + }); + } + } + } + } + pub fn stream_excerpts_with_context_lines( &mut self, buffer: Model, diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index bcdf461e2c3697..17b52a27d88010 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -20,6 +20,7 @@ To further customize providers, you can use `settings.json` to do that as follow - [Configuring endpoints](#custom-endpoint) - [Configuring timeouts](#provider-timeout) - [Configuring default model](#default-model) +- [Configuring alternative models for inline assists](#alternative-assists) ### Zed AI {#zed-ai} @@ -264,6 +265,31 @@ You can also manually edit the `default_model` object in your settings: } ``` +#### Configuring alternative models for inline assists {#alternative-assists} + +You can configure additional models that will be used to perform inline assists in parallel. When you do this, +the inline assist UI will surface controls to cycle between the alternatives generated by each model. 
The models +you specify here are always used in _addition_ to your default model. For example, the following configuration +will generate two outputs for every assist. One with Claude 3.5 Sonnet, and one with GPT-4o. + +```json +{ + "assistant": { + "default_model": { + "provider": "zed.dev", + "model": "claude-3-5-sonnet" + }, + "inline_alternatives": [ + { + "provider": "zed.dev", + "model": "gpt-4o" + } + ], + "version": "2" + } +} +``` + #### Common Panel Settings | key | type | default | description | From 8103ac12bfc596f0f32f041239e0e26f9c2ee4cc Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Fri, 20 Sep 2024 06:36:50 +0300 Subject: [PATCH 233/762] ssh-remoting: Tidy up the code a bit after #18094 (#18102) Release Notes: - N/A --- crates/client/src/client.rs | 2 +- crates/project/src/worktree_store.rs | 7 ++++--- crates/remote/src/ssh_session.rs | 2 +- crates/rpc/src/proto_client.rs | 6 +++--- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index a8387f7c5ac9ba..48bd646d8aa70e 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1622,7 +1622,7 @@ impl ProtoClient for Client { &self.handler_set } - fn goes_via_collab(&self) -> bool { + fn is_via_collab(&self) -> bool { true } } diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 7fae8b9e1dbdd7..5c3b2a00a98665 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -313,9 +313,10 @@ impl WorktreeStore { }) } + #[track_caller] pub fn add(&mut self, worktree: &Model, cx: &mut ModelContext) { let worktree_id = worktree.read(cx).id(); - debug_assert!(!self.worktrees().any(|w| w.read(cx).id() == worktree_id)); + debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id)); let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible(); let handle = if 
push_strong_handle { @@ -487,7 +488,7 @@ impl WorktreeStore { }; // collab has bad concurrency guarantees, so we send requests in serial. - let update_project = if downstream_client.goes_via_collab() { + let update_project = if downstream_client.is_via_collab() { Some(downstream_client.request(update)) } else { downstream_client.send(update).log_err(); @@ -508,7 +509,7 @@ impl WorktreeStore { move |update| { let client = client.clone(); async move { - if client.goes_via_collab() { + if client.is_via_collab() { client.request(update).map(|result| result.is_ok()).await } else { client.send(update).is_ok() diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 10608b74f3593f..2bd18aa37e19d1 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -471,7 +471,7 @@ impl ProtoClient for SshSession { &self.state } - fn goes_via_collab(&self) -> bool { + fn is_via_collab(&self) -> bool { false } } diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 89ef580cdfb3d7..88099102765ed7 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -28,7 +28,7 @@ pub trait ProtoClient: Send + Sync { fn message_handler_set(&self) -> &parking_lot::Mutex; - fn goes_via_collab(&self) -> bool; + fn is_via_collab(&self) -> bool; } #[derive(Default)] @@ -141,8 +141,8 @@ impl AnyProtoClient { Self(client) } - pub fn goes_via_collab(&self) -> bool { - self.0.goes_via_collab() + pub fn is_via_collab(&self) -> bool { + self.0.is_via_collab() } pub fn request( From 579267f399816ae9e54b79c92949384a0ac8455a Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 08:04:26 +0200 Subject: [PATCH 234/762] docs: Update JavaScript docs and remove TBDs (#17989) Release Notes: - N/A --- docs/src/languages/javascript.md | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/docs/src/languages/javascript.md b/docs/src/languages/javascript.md index 
8fb84881ada0f8..7e74cbbfaebb41 100644 --- a/docs/src/languages/javascript.md +++ b/docs/src/languages/javascript.md @@ -26,17 +26,15 @@ For example, if you have Prettier installed and on your `PATH`, you can use it t } ``` - +Zed supports JSDoc syntax in JavaScript and TypeScript comments that match the JSDoc syntax. Zed uses [tree-sitter/tree-sitter-jsdoc](https://github.com/tree-sitter/tree-sitter-jsdoc) for parsing and highlighting JSDoc. ## ESLint From 93730983dd31bad1855edd3d5943a617f83f2b40 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 08:04:49 +0200 Subject: [PATCH 235/762] ssh remoting: Restore items/buffers when opening SSH project (#18083) Demo: https://github.com/user-attachments/assets/ab79ed0d-13a6-4ae7-8e76-6365fc322ec4 Release Notes: - N/A Co-authored-by: Bennet --- crates/editor/src/items.rs | 8 ++++++-- crates/workspace/src/workspace.rs | 27 +++++++++++++++++---------- 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index e3e8ca604b28c3..3d04eb82d38e39 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1087,10 +1087,14 @@ impl SerializableItem for Editor { let workspace_id = workspace.database_id()?; let buffer = self.buffer().read(cx).as_singleton()?; + let path = buffer + .read(cx) + .file() + .map(|file| file.full_path(cx)) + .and_then(|full_path| project.read(cx).find_project_path(&full_path, cx)) + .and_then(|project_path| project.read(cx).absolute_path(&project_path, cx)); let is_dirty = buffer.read(cx).is_dirty(); - let local_file = buffer.read(cx).file().and_then(|file| file.as_local()); - let path = local_file.map(|file| file.abs_path(cx)); let mtime = buffer.read(cx).saved_mtime(); let snapshot = buffer.read(cx).snapshot(); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 5855dcce1e5919..92a85299f47b70 100644 --- a/crates/workspace/src/workspace.rs +++ 
b/crates/workspace/src/workspace.rs @@ -1114,18 +1114,16 @@ impl Workspace { } // Get project paths for all of the abs_paths - let mut worktree_roots: HashSet> = Default::default(); let mut project_paths: Vec<(PathBuf, Option)> = Vec::with_capacity(paths_to_open.len()); for path in paths_to_open.into_iter() { - if let Some((worktree, project_entry)) = cx + if let Some((_, project_entry)) = cx .update(|cx| { Workspace::project_path_for_path(project_handle.clone(), &path, true, cx) })? .await .log_err() { - worktree_roots.extend(worktree.update(&mut cx, |tree, _| tree.abs_path()).ok()); project_paths.push((path, Some(project_entry))); } else { project_paths.push((path, None)); @@ -5532,12 +5530,13 @@ pub fn open_ssh_project( let serialized_workspace = persistence::DB.workspace_for_ssh_project(&serialized_ssh_project); - let workspace_id = - if let Some(workspace_id) = serialized_workspace.map(|workspace| workspace.id) { - workspace_id - } else { - persistence::DB.next_id().await? - }; + let workspace_id = if let Some(workspace_id) = + serialized_workspace.as_ref().map(|workspace| workspace.id) + { + workspace_id + } else { + persistence::DB.next_id().await? + }; cx.update_window(window.into(), |_, cx| { cx.replace_root_view(|cx| { @@ -5548,7 +5547,15 @@ pub fn open_ssh_project( }); })?; - window.update(&mut cx, |_, cx| cx.activate_window()) + window + .update(&mut cx, |_, cx| { + cx.activate_window(); + + open_items(serialized_workspace, vec![], app_state, cx) + })? + .await?; + + Ok(()) }) } From ace4d5185dbd53023f8b583df781bc96f891b80a Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 10:53:06 +0200 Subject: [PATCH 236/762] settings: Show notification when user/project settings fail to parse (#18122) Closes #16876 We only ever showed parsing errors, but not if something failed to deserialize. Basically, if you had a stray `,` somewhere, we'd show a notification for user errors, but only squiggly lines if you had a `[]` instead of a `{}`. 
The squiggly lines would only show up when there were schema errors. In the case of `formatter` settings, for example, if someone put in a `{}` instead of `[]`, we'd never show anything. With this change we always show a notification if parsing user or project settings fails. (Right now, the error message might still be bad, but that's a separate change) Release Notes: - Added a notification to warn users if their user settings or project-local settings failed to deserialize. Demo: https://github.com/user-attachments/assets/e5c48165-f2f7-4b5c-9c6d-6ea74f678683 --- crates/language/src/language_settings.rs | 7 ++ crates/project/src/project.rs | 26 ++++++- crates/project/src/project_settings.rs | 45 +++++++++-- crates/settings/src/settings.rs | 4 +- crates/settings/src/settings_store.rs | 95 ++++++++++++++++-------- crates/workspace/src/workspace.rs | 19 ++++- crates/zed/src/main.rs | 28 ++++--- 7 files changed, 173 insertions(+), 51 deletions(-) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 77c9a1d18cee14..6121cb6a39a2ca 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -1152,6 +1152,13 @@ mod tests { ); } + #[test] + fn test_formatter_deserialization_invalid() { + let raw_auto = "{\"formatter\": {}}"; + let result: Result = serde_json::from_str(raw_auto); + assert!(result.is_err()); + } + #[test] pub fn test_resolve_language_servers() { fn language_server_names(names: &[&str]) -> Vec { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index fcf10d11c2cca7..435c1430243705 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -59,12 +59,14 @@ use node_runtime::NodeRuntime; use parking_lot::{Mutex, RwLock}; use paths::{local_tasks_file_relative_path, local_vscode_tasks_file_relative_path}; pub use prettier_store::PrettierStore; -use project_settings::{ProjectSettings, SettingsObserver}; +use 
project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent}; use remote::SshSession; use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient, ErrorCode}; use search::{SearchInputKind, SearchQuery, SearchResult}; use search_history::SearchHistory; -use settings::{watch_config_file, Settings, SettingsLocation, SettingsStore}; +use settings::{ + watch_config_file, InvalidSettingsError, Settings, SettingsLocation, SettingsStore, +}; use smol::channel::Receiver; use snippet::Snippet; use snippet_provider::SnippetProvider; @@ -230,6 +232,7 @@ pub enum Event { LanguageServerRemoved(LanguageServerId), LanguageServerLog(LanguageServerId, LanguageServerLogType, String), Notification(String), + LocalSettingsUpdated(Result<(), InvalidSettingsError>), LanguageServerPrompt(LanguageServerPromptRequest), LanguageNotFound(Model), ActiveEntryChanged(Option), @@ -644,6 +647,8 @@ impl Project { let settings_observer = cx.new_model(|cx| { SettingsObserver::new_local(fs.clone(), worktree_store.clone(), cx) }); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); let environment = ProjectEnvironment::new(&worktree_store, env, cx); let lsp_store = cx.new_model(|cx| { @@ -729,6 +734,8 @@ impl Project { let settings_observer = cx.new_model(|cx| { SettingsObserver::new_ssh(ssh.clone().into(), worktree_store.clone(), cx) }); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); let environment = ProjectEnvironment::new(&worktree_store, None, cx); let lsp_store = cx.new_model(|cx| { @@ -913,6 +920,8 @@ impl Project { cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach(); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); let mut this = Self { buffer_ordered_messages_tx: tx, @@ -2058,6 +2067,19 @@ impl Project { } } + fn on_settings_observer_event( + &mut self, + _: Model, + event: &SettingsObserverEvent, + cx: &mut 
ModelContext, + ) { + match event { + SettingsObserverEvent::LocalSettingsUpdated(error) => { + cx.emit(Event::LocalSettingsUpdated(error.clone())) + } + } + } + fn on_worktree_store_event( &mut self, _: Model, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 2eeb8408961186..9a7c80703c734c 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1,11 +1,11 @@ use collections::HashMap; use fs::Fs; -use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Model, ModelContext}; +use gpui::{AppContext, AsyncAppContext, BorrowAppContext, EventEmitter, Model, ModelContext}; use paths::local_settings_file_relative_path; use rpc::{proto, AnyProtoClient, TypedEnvelope}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsStore}; +use settings::{InvalidSettingsError, Settings, SettingsSources, SettingsStore}; use std::{ path::{Path, PathBuf}, sync::Arc, @@ -176,6 +176,13 @@ pub enum SettingsObserverMode { Remote, } +#[derive(Clone, Debug, PartialEq)] +pub enum SettingsObserverEvent { + LocalSettingsUpdated(Result<(), InvalidSettingsError>), +} + +impl EventEmitter for SettingsObserver {} + pub struct SettingsObserver { mode: SettingsObserverMode, downstream_client: Option, @@ -415,11 +422,16 @@ impl SettingsObserver { ) { let worktree_id = worktree.read(cx).id(); let remote_worktree_id = worktree.read(cx).id(); - cx.update_global::(|store, cx| { + + let result = cx.update_global::>(|store, cx| { for (directory, file_content) in settings_contents { - store - .set_local_settings(worktree_id, directory.clone(), file_content.as_deref(), cx) - .log_err(); + store.set_local_settings( + worktree_id, + directory.clone(), + file_content.as_deref(), + cx, + )?; + if let Some(downstream_client) = &self.downstream_client { downstream_client .send(proto::UpdateWorktreeSettings { @@ -431,6 +443,25 @@ impl SettingsObserver { 
.log_err(); } } - }) + anyhow::Ok(()) + }); + + match result { + Err(error) => { + if let Ok(error) = error.downcast::() { + if let InvalidSettingsError::LocalSettings { + ref path, + ref message, + } = error + { + log::error!("Failed to set local settings in {:?}: {:?}", path, message); + cx.emit(SettingsObserverEvent::LocalSettingsUpdated(Err(error))); + } + } + } + Ok(()) => { + cx.emit(SettingsObserverEvent::LocalSettingsUpdated(Ok(()))); + } + } } } diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 5ece3f867e4ff4..f1f8591bba4525 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -13,7 +13,9 @@ pub use editable_setting_control::*; pub use json_schema::*; pub use keymap_file::KeymapFile; pub use settings_file::*; -pub use settings_store::{Settings, SettingsLocation, SettingsSources, SettingsStore}; +pub use settings_store::{ + InvalidSettingsError, Settings, SettingsLocation, SettingsSources, SettingsStore, +}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 3ef8bffe2d3ded..20bf52f2c57ef0 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -3,6 +3,7 @@ use collections::{btree_map, hash_map, BTreeMap, HashMap}; use fs::Fs; use futures::{channel::mpsc, future::LocalBoxFuture, FutureExt, StreamExt}; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Global, Task, UpdateGlobal}; +use paths::local_settings_file_relative_path; use schemars::{gen::SchemaGenerator, schema::RootSchema, JsonSchema}; use serde::{de::DeserializeOwned, Deserialize as _, Serialize}; use smallvec::SmallVec; @@ -10,7 +11,7 @@ use std::{ any::{type_name, Any, TypeId}, fmt::Debug, ops::Range, - path::Path, + path::{Path, PathBuf}, str, sync::{Arc, LazyLock}, }; @@ -694,9 +695,14 @@ impl SettingsStore { 
.deserialize_setting(&self.raw_extension_settings) .log_err(); - let user_settings = setting_value - .deserialize_setting(&self.raw_user_settings) - .log_err(); + let user_settings = match setting_value.deserialize_setting(&self.raw_user_settings) { + Ok(settings) => Some(settings), + Err(error) => { + return Err(anyhow!(InvalidSettingsError::UserSettings { + message: error.to_string() + })); + } + }; let mut release_channel_settings = None; if let Some(release_settings) = &self @@ -746,34 +752,43 @@ impl SettingsStore { break; } - if let Some(local_settings) = - setting_value.deserialize_setting(local_settings).log_err() - { - paths_stack.push(Some((*root_id, path.as_ref()))); - project_settings_stack.push(local_settings); - - // If a local settings file changed, then avoid recomputing local - // settings for any path outside of that directory. - if changed_local_path.map_or(false, |(changed_root_id, changed_local_path)| { - *root_id != changed_root_id || !path.starts_with(changed_local_path) - }) { - continue; - } - - if let Some(value) = setting_value - .load_setting( - SettingsSources { - default: &default_settings, - extensions: extension_settings.as_ref(), - user: user_settings.as_ref(), - release_channel: release_channel_settings.as_ref(), - project: &project_settings_stack.iter().collect::>(), + match setting_value.deserialize_setting(local_settings) { + Ok(local_settings) => { + paths_stack.push(Some((*root_id, path.as_ref()))); + project_settings_stack.push(local_settings); + + // If a local settings file changed, then avoid recomputing local + // settings for any path outside of that directory. 
+ if changed_local_path.map_or( + false, + |(changed_root_id, changed_local_path)| { + *root_id != changed_root_id || !path.starts_with(changed_local_path) }, - cx, - ) - .log_err() - { - setting_value.set_local_value(*root_id, path.clone(), value); + ) { + continue; + } + + if let Some(value) = setting_value + .load_setting( + SettingsSources { + default: &default_settings, + extensions: extension_settings.as_ref(), + user: user_settings.as_ref(), + release_channel: release_channel_settings.as_ref(), + project: &project_settings_stack.iter().collect::>(), + }, + cx, + ) + .log_err() + { + setting_value.set_local_value(*root_id, path.clone(), value); + } + } + Err(error) => { + return Err(anyhow!(InvalidSettingsError::LocalSettings { + path: path.join(local_settings_file_relative_path()), + message: error.to_string() + })); } } } @@ -782,6 +797,24 @@ impl SettingsStore { } } +#[derive(Debug, Clone, PartialEq)] +pub enum InvalidSettingsError { + LocalSettings { path: PathBuf, message: String }, + UserSettings { message: String }, +} + +impl std::fmt::Display for InvalidSettingsError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + InvalidSettingsError::LocalSettings { message, .. 
} + | InvalidSettingsError::UserSettings { message } => { + write!(f, "{}", message) + } + } + } +} +impl std::error::Error for InvalidSettingsError {} + impl Debug for SettingsStore { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("SettingsStore") diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 92a85299f47b70..1fbeab38a2e8b4 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -64,7 +64,7 @@ use project::{ use remote::{SshConnectionOptions, SshSession}; use serde::Deserialize; use session::AppSession; -use settings::Settings; +use settings::{InvalidSettingsError, Settings}; use shared_screen::SharedScreen; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, @@ -832,6 +832,23 @@ impl Workspace { } } + project::Event::LocalSettingsUpdated(result) => { + struct LocalSettingsUpdated; + let id = NotificationId::unique::(); + + match result { + Err(InvalidSettingsError::LocalSettings { message, path }) => { + let full_message = + format!("Failed to set local settings in {:?}:\n{}", path, message); + this.show_notification(id, cx, |cx| { + cx.new_view(|_| MessageNotification::new(full_message.clone())) + }) + } + Err(_) => {} + Ok(_) => this.dismiss_notification(&id, cx), + } + } + project::Event::Notification(message) => { struct ProjectNotification; diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 3104001f992726..6ecdbb224f3d91 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -34,7 +34,9 @@ use parking_lot::Mutex; use recent_projects::open_ssh_project; use release_channel::{AppCommitSha, AppVersion}; use session::{AppSession, Session}; -use settings::{handle_settings_file_changes, watch_config_file, Settings, SettingsStore}; +use settings::{ + handle_settings_file_changes, watch_config_file, InvalidSettingsError, Settings, SettingsStore, +}; use simplelog::ConfigBuilder; use smol::process::Command; use std::{ @@ 
-626,20 +628,28 @@ fn handle_settings_changed(error: Option, cx: &mut AppContext) { for workspace in workspace::local_workspace_windows(cx) { workspace - .update(cx, |workspace, cx| match &error { - Some(error) => { - workspace.show_notification(id.clone(), cx, |cx| { - cx.new_view(|_| { - MessageNotification::new(format!("Invalid settings file\n{error}")) + .update(cx, |workspace, cx| { + match error + .as_ref() + .and_then(|error| error.downcast_ref::()) + { + Some(InvalidSettingsError::UserSettings { message }) => { + workspace.show_notification(id.clone(), cx, |cx| { + cx.new_view(|_| { + MessageNotification::new(format!( + "Invalid user settings file\n{message}" + )) .with_click_message("Open settings file") .on_click(|cx| { cx.dispatch_action(zed_actions::OpenSettings.boxed_clone()); cx.emit(DismissEvent); }) - }) - }); + }) + }); + } + None => workspace.dismiss_notification(&id, cx), + _ => {} } - None => workspace.dismiss_notification(&id, cx), }) .log_err(); } From 97708fdf43bbd15e3b978412d6682502df2f0d70 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 11:10:19 +0200 Subject: [PATCH 237/762] settings: Follow-up fix to show more errors (#18123) The condition added in #18122 was too strict. 
Release Notes: - N/A --- crates/zed/src/main.rs | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 6ecdbb224f3d91..d3eb97c9aa506e 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -629,26 +629,28 @@ fn handle_settings_changed(error: Option, cx: &mut AppContext) { for workspace in workspace::local_workspace_windows(cx) { workspace .update(cx, |workspace, cx| { - match error - .as_ref() - .and_then(|error| error.downcast_ref::()) - { - Some(InvalidSettingsError::UserSettings { message }) => { - workspace.show_notification(id.clone(), cx, |cx| { - cx.new_view(|_| { - MessageNotification::new(format!( - "Invalid user settings file\n{message}" - )) - .with_click_message("Open settings file") - .on_click(|cx| { - cx.dispatch_action(zed_actions::OpenSettings.boxed_clone()); - cx.emit(DismissEvent); + match error.as_ref() { + Some(error) => { + if let Some(InvalidSettingsError::LocalSettings { .. }) = + error.downcast_ref::() + { + // Local settings will be displayed by the projects + } else { + workspace.show_notification(id.clone(), cx, |cx| { + cx.new_view(|_| { + MessageNotification::new(format!( + "Invalid user settings file\n{error}" + )) + .with_click_message("Open settings file") + .on_click(|cx| { + cx.dispatch_action(zed_actions::OpenSettings.boxed_clone()); + cx.emit(DismissEvent); + }) }) - }) - }); + }); + } } None => workspace.dismiss_notification(&id, cx), - _ => {} } }) .log_err(); From ca033e647507ab8b31bc3a4a249f501ccecb0f9c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 08:35:13 -0400 Subject: [PATCH 238/762] Revert "Update nightly tag every night (#17879)" (#18133) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR reverts #17879, as it wasn't working. 
When a GitHub Action pushes a tag, it does not trigger workflows for push events for that tag: > When you use the repository's `GITHUB_TOKEN` to perform tasks, events triggered by the `GITHUB_TOKEN`, with the exception of `workflow_dispatch` and `repository_dispatch`, will not create a new workflow run. This prevents you from accidentally creating recursive workflow runs. For example, if a workflow run pushes code using the repository's `GITHUB_TOKEN`, a new workflow will not run even when the repository contains a workflow configured to run when `push` events occur. > > — [source](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) This reverts commit 761129e3739efacb7b8763eaa0fa8a109e935447. Release Notes: - N/A --- .github/workflows/bump_nightly_tag.yml | 23 ----------------------- .github/workflows/release_nightly.yml | 3 +++ 2 files changed, 3 insertions(+), 23 deletions(-) delete mode 100644 .github/workflows/bump_nightly_tag.yml diff --git a/.github/workflows/bump_nightly_tag.yml b/.github/workflows/bump_nightly_tag.yml deleted file mode 100644 index 0959ae9677142c..00000000000000 --- a/.github/workflows/bump_nightly_tag.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Update Nightly Tag - -on: - schedule: - # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) - - cron: "0 7 * * *" - -jobs: - update-nightly-tag: - if: github.repository_owner == 'zed-industries' - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 - with: - fetch-depth: 0 - - - name: Update nightly tag - run: | - git config user.name github-actions - git config user.email github-actions@github.com - git tag -f nightly - git push origin nightly --force diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 17db66a264bdc0..bcaa60b77589c1 100644 --- 
a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -1,6 +1,9 @@ name: Release Nightly on: + schedule: + # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) + - cron: "0 7 * * *" push: tags: - "nightly" From 90a12f55642410e38df65d7f8381d6ecb3d0c1c2 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 14:35:45 +0200 Subject: [PATCH 239/762] ssh remoting: Do not double-register LspAdapters (#18132) This fixes the bug with hover tooltips appearing multiple times. Turns out everytime we receive the `CreateLanguageServer` message we'd add a new adapter but only have a single server running for all of them. And we send a `CreateLanguageServer` message everytime you open a buffer. What this does is to only add a new adapter if it hasn't already been registered, which is also what we do locally. Release Notes: - N/A --- crates/language/src/language_registry.rs | 34 ++++++++++++++++-- crates/project/src/lsp_store.rs | 44 ++++++++++++------------ 2 files changed, 54 insertions(+), 24 deletions(-) diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 17ebef50e8162e..e264517d5b0300 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -326,13 +326,43 @@ impl LanguageRegistry { Some(load_lsp_adapter()) } - pub fn register_lsp_adapter(&self, language_name: LanguageName, adapter: Arc) { + pub fn register_lsp_adapter( + &self, + language_name: LanguageName, + adapter: Arc, + ) -> Arc { + let cached = CachedLspAdapter::new(adapter); self.state .write() .lsp_adapters .entry(language_name) .or_default() - .push(CachedLspAdapter::new(adapter)); + .push(cached.clone()); + cached + } + + pub fn get_or_register_lsp_adapter( + &self, + language_name: LanguageName, + server_name: LanguageServerName, + build_adapter: impl FnOnce() -> Arc + 'static, + ) -> Arc { + let registered = self + .state + .write() + 
.lsp_adapters + .entry(language_name.clone()) + .or_default() + .iter() + .find(|cached_adapter| cached_adapter.name == server_name) + .cloned(); + + if let Some(found) = registered { + found + } else { + let adapter = build_adapter(); + self.register_lsp_adapter(language_name, adapter) + } } /// Register a fake language server and adapter diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 5c32c9030db3ff..92f37f87af4056 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4475,7 +4475,7 @@ impl LspStore { mut cx: AsyncAppContext, ) -> Result { let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let name = LanguageServerName::from_proto(envelope.payload.name); + let server_name = LanguageServerName::from_proto(envelope.payload.name); let binary = envelope .payload @@ -4494,6 +4494,14 @@ impl LspStore { let matcher: LanguageMatcher = serde_json::from_str(&language.matcher)?; this.update(&mut cx, |this, cx| { + let Some(worktree) = this + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + else { + return Err(anyhow!("worktree not found")); + }; + this.languages .register_language(language_name.clone(), None, matcher.clone(), { let language_name = language_name.clone(); @@ -4513,28 +4521,20 @@ impl LspStore { .spawn(this.languages.language_for_name(language_name.0.as_ref())) .detach(); - let adapter = Arc::new(SshLspAdapter::new( - name, - binary, - envelope.payload.initialization_options, - envelope.payload.code_action_kinds, - )); - - this.languages - .register_lsp_adapter(language_name.clone(), adapter.clone()); - let Some(worktree) = this - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - else { - return Err(anyhow!("worktree not found")); - }; - this.start_language_server( - &worktree, - CachedLspAdapter::new(adapter), - language_name, - cx, + let adapter = this.languages.get_or_register_lsp_adapter( + language_name.clone(), + 
server_name.clone(), + || { + Arc::new(SshLspAdapter::new( + server_name, + binary, + envelope.payload.initialization_options, + envelope.payload.code_action_kinds, + )) + }, ); + + this.start_language_server(&worktree, adapter, language_name, cx); Ok(()) })??; Ok(proto::Ack {}) From 16d2afc662ae43cd404279c068bb26557306b9c7 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 08:46:23 -0400 Subject: [PATCH 240/762] ci: Bump `nightly` tag on scheduled Nightly builds (#18134) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR makes it so after a scheduled Nightly build we also update the `nightly` tag to keep things in sync. It's safe to bump the tag within this Action, as it won't trigger another Nightly build due to GitHub's recursive Action protections: > When you use the repository's `GITHUB_TOKEN` to perform tasks, events triggered by the `GITHUB_TOKEN`, with the exception of `workflow_dispatch` and `repository_dispatch`, will not create a new workflow run. This prevents you from accidentally creating recursive workflow runs. For example, if a workflow run pushes code using the repository's `GITHUB_TOKEN`, a new workflow will not run even when the repository contains a workflow configured to run when `push` events occur. 
> > — [source](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) Release Notes: - N/A --- .github/workflows/release_nightly.yml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index bcaa60b77589c1..2b973dcddc3d6f 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -171,3 +171,28 @@ jobs: - name: Upload Zed Nightly run: script/upload-nightly linux-targz + + update-nightly-tag: + name: Update nightly tag + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + needs: + - bundle-mac + - bundle-linux-x86 + - bundle-linux-arm + steps: + - name: Checkout repo + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + with: + fetch-depth: 0 + + - name: Update nightly tag + run: | + if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then + echo "Nightly tag already points to current commit. Skipping tagging." 
+ exit 0 + fi + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f nightly + git push origin nightly --force From d6c184b494a0c9a9a46d4ffdb5483ba65967ab0b Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 20 Sep 2024 09:23:11 -0400 Subject: [PATCH 241/762] Detect 'MD' extension as Markdown (#18135) --- crates/languages/src/markdown/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/markdown/config.toml b/crates/languages/src/markdown/config.toml index 6b518ec8b60401..ce3b294b4efed3 100644 --- a/crates/languages/src/markdown/config.toml +++ b/crates/languages/src/markdown/config.toml @@ -1,6 +1,6 @@ name = "Markdown" grammar = "markdown" -path_suffixes = ["md", "mdx", "mdwn", "markdown"] +path_suffixes = ["md", "mdx", "mdwn", "markdown", "MD"] word_characters = ["-"] brackets = [ { start = "{", end = "}", close = true, newline = true }, From 5f1046b3cd5290112f6dd464e49bc58661fd2179 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 20 Sep 2024 10:28:22 -0400 Subject: [PATCH 242/762] Make evals handle failures more gracefully (#18082) Now when an individual project eval fails, instead of panicking we add it to a list of failures that we collect and report at the end (and make the exit code nonzero). 
Release Notes: - N/A --- crates/evals/src/eval.rs | 315 ++++++++++++++++++++++++--------------- 1 file changed, 195 insertions(+), 120 deletions(-) diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 708cfa7511a402..0580053373c54c 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -12,13 +12,16 @@ use language::LanguageRegistry; use node_runtime::FakeNodeRuntime; use open_ai::OpenAiEmbeddingModel; use project::Project; -use semantic_index::{OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status}; +use semantic_index::{ + EmbeddingProvider, OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status, +}; use serde::{Deserialize, Serialize}; use settings::SettingsStore; use smol::channel::bounded; use smol::io::AsyncReadExt; use smol::Timer; use std::ops::RangeInclusive; +use std::path::PathBuf; use std::time::Duration; use std::{ fs, @@ -237,6 +240,14 @@ async fn fetch_code_search_net_resources(http_client: &dyn HttpClient) -> Result Ok(()) } +#[derive(Default, Debug)] +struct Counts { + covered_results: usize, + overlapped_results: usize, + covered_files: usize, + total_results: usize, +} + async fn run_evaluation( only_repo: Option, executor: &BackgroundExecutor, @@ -297,12 +308,11 @@ async fn run_evaluation( cx.update(|cx| languages::init(language_registry.clone(), node_runtime.clone(), cx)) .unwrap(); - let mut covered_result_count = 0; - let mut overlapped_result_count = 0; - let mut covered_file_count = 0; - let mut total_result_count = 0; + let mut counts = Counts::default(); eprint!("Running evals."); + let mut failures = Vec::new(); + for evaluation_project in evaluations { if only_repo .as_ref() @@ -314,27 +324,24 @@ async fn run_evaluation( eprint!("\r\x1B[2K"); eprint!( "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. 
Project: {}...", - covered_result_count, - total_result_count, - overlapped_result_count, - total_result_count, - covered_file_count, - total_result_count, + counts.covered_results, + counts.total_results, + counts.overlapped_results, + counts.total_results, + counts.covered_files, + counts.total_results, evaluation_project.repo ); - let repo_db_path = - db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_"))); - let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider.clone(), cx) - .await - .unwrap(); - let repo_dir = repos_dir.join(&evaluation_project.repo); if !repo_dir.exists() || repo_dir.join(SKIP_EVAL_PATH).exists() { eprintln!("Skipping {}: directory not found", evaluation_project.repo); continue; } + let repo_db_path = + db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_"))); + let project = cx .update(|cx| { Project::local( @@ -349,125 +356,193 @@ async fn run_evaluation( }) .unwrap(); - let (worktree, _) = project - .update(cx, |project, cx| { - project.find_or_create_worktree(repo_dir, true, cx) - })? - .await?; + let repo = evaluation_project.repo.clone(); + if let Err(err) = run_eval_project( + evaluation_project, + &user_store, + repo_db_path, + &repo_dir, + &mut counts, + project, + embedding_provider.clone(), + fs.clone(), + cx, + ) + .await + { + eprintln!("{repo} eval failed with error: {:?}", err); + + failures.push((repo, err)); + } + } - worktree - .update(cx, |worktree, _| { - worktree.as_local().unwrap().scan_complete() - }) - .unwrap() - .await; + eprintln!( + "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. 
{} failed.", + counts.covered_results, + counts.total_results, + counts.overlapped_results, + counts.total_results, + counts.covered_files, + counts.total_results, + failures.len(), + ); - let project_index = cx - .update(|cx| semantic_index.create_project_index(project.clone(), cx)) - .unwrap(); - wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await; + if failures.is_empty() { + Ok(()) + } else { + eprintln!("Failures:\n"); - for query in evaluation_project.queries { - let results = cx - .update(|cx| { + for (index, (repo, failure)) in failures.iter().enumerate() { + eprintln!("Failure #{} - {repo}\n{:?}", index + 1, failure); + } + + Err(anyhow::anyhow!("Some evals failed.")) + } +} + +#[allow(clippy::too_many_arguments)] +async fn run_eval_project( + evaluation_project: EvaluationProject, + user_store: &Model, + repo_db_path: PathBuf, + repo_dir: &Path, + counts: &mut Counts, + project: Model, + embedding_provider: Arc, + fs: Arc, + cx: &mut AsyncAppContext, +) -> Result<(), anyhow::Error> { + let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider, cx).await?; + + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree(repo_dir, true, cx) + })? + .await?; + + worktree + .update(cx, |worktree, _| { + worktree.as_local().unwrap().scan_complete() + })? + .await; + + let project_index = cx.update(|cx| semantic_index.create_project_index(project.clone(), cx))?; + wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await; + + for query in evaluation_project.queries { + let results = { + // Retry search up to 3 times in case of timeout, network failure, etc. 
+ let mut retries_remaining = 3; + let mut result; + + loop { + match cx.update(|cx| { let project_index = project_index.read(cx); project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx) - }) - .unwrap() - .await - .unwrap(); - - let results = SemanticDb::load_results(results, &fs.clone(), &cx) - .await - .unwrap(); - - let mut project_covered_result_count = 0; - let mut project_overlapped_result_count = 0; - let mut project_covered_file_count = 0; - let mut covered_result_indices = Vec::new(); - for expected_result in &query.expected_results { - let mut file_matched = false; - let mut range_overlapped = false; - let mut range_covered = false; - - for (ix, result) in results.iter().enumerate() { - if result.path.as_ref() == Path::new(&expected_result.file) { - file_matched = true; - let start_matched = - result.row_range.contains(&expected_result.lines.start()); - let end_matched = result.row_range.contains(&expected_result.lines.end()); - - if start_matched || end_matched { - range_overlapped = true; - } - - if start_matched && end_matched { - range_covered = true; - covered_result_indices.push(ix); + }) { + Ok(task) => match task.await { + Ok(answer) => { + result = Ok(answer); break; } + Err(err) => { + result = Err(err); + } + }, + Err(err) => { + result = Err(err); } } - if range_covered { - project_covered_result_count += 1 - }; - if range_overlapped { - project_overlapped_result_count += 1 - }; - if file_matched { - project_covered_file_count += 1 - }; + if retries_remaining > 0 { + eprintln!( + "Retrying search after it failed on query {:?} with {:?}", + query, result + ); + retries_remaining -= 1; + } else { + eprintln!( + "Ran out of retries; giving up on search which failed on query {:?} with {:?}", + query, result + ); + break; + } } - let outcome_repo = evaluation_project.repo.clone(); - - let query_results = EvaluationQueryOutcome { - repo: outcome_repo, - query: query.query, - total_result_count: query.expected_results.len(), - 
covered_result_count: project_covered_result_count, - overlapped_result_count: project_overlapped_result_count, - covered_file_count: project_covered_file_count, - expected_results: query.expected_results, - actual_results: results - .iter() - .map(|result| EvaluationSearchResult { - file: result.path.to_string_lossy().to_string(), - lines: result.row_range.clone(), - }) - .collect(), - covered_result_indices, - }; - overlapped_result_count += query_results.overlapped_result_count; - covered_result_count += query_results.covered_result_count; - covered_file_count += query_results.covered_file_count; - total_result_count += query_results.total_result_count; + SemanticDb::load_results(result?, &fs.clone(), &cx).await? + }; - println!("{}", serde_json::to_string(&query_results).unwrap()); + let mut project_covered_result_count = 0; + let mut project_overlapped_result_count = 0; + let mut project_covered_file_count = 0; + let mut covered_result_indices = Vec::new(); + for expected_result in &query.expected_results { + let mut file_matched = false; + let mut range_overlapped = false; + let mut range_covered = false; + + for (ix, result) in results.iter().enumerate() { + if result.path.as_ref() == Path::new(&expected_result.file) { + file_matched = true; + let start_matched = result.row_range.contains(&expected_result.lines.start()); + let end_matched = result.row_range.contains(&expected_result.lines.end()); + + if start_matched || end_matched { + range_overlapped = true; + } + + if start_matched && end_matched { + range_covered = true; + covered_result_indices.push(ix); + break; + } + } + } + + if range_covered { + project_covered_result_count += 1 + }; + if range_overlapped { + project_overlapped_result_count += 1 + }; + if file_matched { + project_covered_file_count += 1 + }; } + let outcome_repo = evaluation_project.repo.clone(); + + let query_results = EvaluationQueryOutcome { + repo: outcome_repo, + query: query.query, + total_result_count: 
query.expected_results.len(), + covered_result_count: project_covered_result_count, + overlapped_result_count: project_overlapped_result_count, + covered_file_count: project_covered_file_count, + expected_results: query.expected_results, + actual_results: results + .iter() + .map(|result| EvaluationSearchResult { + file: result.path.to_string_lossy().to_string(), + lines: result.row_range.clone(), + }) + .collect(), + covered_result_indices, + }; - user_store - .update(cx, |_, _| { - drop(semantic_index); - drop(project); - drop(worktree); - drop(project_index); - }) - .unwrap(); - } + counts.overlapped_results += query_results.overlapped_result_count; + counts.covered_results += query_results.covered_result_count; + counts.covered_files += query_results.covered_file_count; + counts.total_results += query_results.total_result_count; - eprint!( - "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured.", - covered_result_count, - total_result_count, - overlapped_result_count, - total_result_count, - covered_file_count, - total_result_count, - ); + println!("{}", serde_json::to_string(&query_results)?); + } - Ok(()) + user_store.update(cx, |_, _| { + drop(semantic_index); + drop(project); + drop(worktree); + drop(project_index); + }) } async fn wait_for_indexing_complete( @@ -524,7 +599,7 @@ async fn fetch_eval_repos( let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json"); let evaluations: Vec = serde_json::from_slice(&evaluations).unwrap(); - eprint!("Fetching evaluation repositories..."); + eprintln!("Fetching evaluation repositories..."); executor .scoped(move |scope| { From ab1d466c5f46fbaf84615dc39f142cfe0c0880e3 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 10:48:27 -0400 Subject: [PATCH 243/762] Remove `replica_id` from `MultiBuffer`s (#18141) This PR removes the `replica_id` field from the `MultiBuffer` struct. 
We were only ever referencing this field to pass when constructing a `MultiBuffer`, and never used it outside of that. Release Notes: - N/A --- crates/assistant/src/assistant_panel.rs | 5 +- crates/assistant/src/inline_assistant.rs | 2 +- crates/collab/src/tests/following_tests.rs | 2 +- .../src/copilot_completion_provider.rs | 4 +- crates/diagnostics/src/diagnostics.rs | 7 +-- crates/editor/src/display_map/block_map.rs | 2 +- crates/editor/src/editor.rs | 22 ++------ crates/editor/src/editor_tests.rs | 20 +++---- crates/editor/src/git.rs | 2 +- crates/editor/src/hunk_diff.rs | 2 +- crates/editor/src/inlay_hint_cache.rs | 4 +- crates/editor/src/items.rs | 3 +- crates/editor/src/movement.rs | 2 +- crates/editor/src/test/editor_test_context.rs | 2 +- crates/multi_buffer/src/multi_buffer.rs | 56 ++++++++----------- crates/search/src/project_search.rs | 3 +- 16 files changed, 54 insertions(+), 84 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 364c6f9663120c..22237eeb079270 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -2814,9 +2814,8 @@ impl ContextEditor { } else { // If there are multiple buffers or suggestion groups, create a multibuffer let multibuffer = cx.new_model(|cx| { - let replica_id = project.read(cx).replica_id(); - let mut multibuffer = MultiBuffer::new(replica_id, Capability::ReadWrite) - .with_title(resolved_step.title.clone()); + let mut multibuffer = + MultiBuffer::new(Capability::ReadWrite).with_title(resolved_step.title.clone()); for (buffer, groups) in &resolved_step.suggestion_groups { let excerpt_ids = multibuffer.push_excerpts( buffer.clone(), diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 428b33f3bbd92c..d95b54d3c6a6ac 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1145,7 +1145,7 @@ impl InlineAssistant { let 
deleted_lines_editor = cx.new_view(|cx| { let multi_buffer = cx.new_model(|_| { - MultiBuffer::without_headers(0, language::Capability::ReadOnly) + MultiBuffer::without_headers(language::Capability::ReadOnly) }); multi_buffer.update(cx, |multi_buffer, cx| { multi_buffer.push_excerpts( diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index e66b66a1b45893..9a39d6f3eb2e74 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -289,7 +289,7 @@ async fn test_basic_following( .get_open_buffer(&(worktree_id, "2.txt").into(), cx) .unwrap() }); - let mut result = MultiBuffer::new(0, Capability::ReadWrite); + let mut result = MultiBuffer::new(Capability::ReadWrite); result.push_excerpts( buffer_a1, [ExcerptRange { diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index c54fefad6fe599..3a3361cda1996d 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -767,7 +767,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("c = 3\nd = 4\n", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ExcerptRange { @@ -1018,7 +1018,7 @@ mod tests { .unwrap(); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); multibuffer.push_excerpts( private_buffer.clone(), [ExcerptRange { diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index eec4f735ec38e5..687638854209ba 100644 --- a/crates/diagnostics/src/diagnostics.rs 
+++ b/crates/diagnostics/src/diagnostics.rs @@ -156,12 +156,7 @@ impl ProjectDiagnosticsEditor { cx.on_focus_out(&focus_handle, |this, _event, cx| this.focus_out(cx)) .detach(); - let excerpts = cx.new_model(|cx| { - MultiBuffer::new( - project_handle.read(cx).replica_id(), - project_handle.read(cx).capability(), - ) - }); + let excerpts = cx.new_model(|cx| MultiBuffer::new(project_handle.read(cx).capability())); let editor = cx.new_view(|cx| { let mut editor = Editor::for_multibuffer(excerpts.clone(), Some(project_handle.clone()), false, cx); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 3a298832dee5ef..efa026a56c6101 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1671,7 +1671,7 @@ mod tests { let mut excerpt_ids = Vec::new(); let multi_buffer = cx.new_model(|cx| { - let mut multi_buffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); excerpt_ids.extend(multi_buffer.push_excerpts( buffer1.clone(), [ExcerptRange { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f797f82832f0ad..eb2dafc24dc0f3 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2155,10 +2155,6 @@ impl Editor { }); } - pub fn replica_id(&self, cx: &AppContext) -> ReplicaId { - self.buffer.read(cx).replica_id() - } - pub fn leader_peer_id(&self) -> Option { self.leader_peer_id } @@ -4758,8 +4754,6 @@ impl Editor { title: String, mut cx: AsyncWindowContext, ) -> Result<()> { - let replica_id = this.update(&mut cx, |this, cx| this.replica_id(cx))?; - let mut entries = transaction.0.into_iter().collect::>(); cx.update(|cx| { entries.sort_unstable_by_key(|(buffer, _)| { @@ -4802,8 +4796,7 @@ impl Editor { let mut ranges_to_highlight = Vec::new(); let excerpt_buffer = cx.new_model(|cx| { - let mut multibuffer = - MultiBuffer::new(replica_id, 
Capability::ReadWrite).with_title(title); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite).with_title(title); for (buffer_handle, transaction) in &entries { let buffer = buffer_handle.read(cx); ranges_to_highlight.extend( @@ -9610,7 +9603,6 @@ impl Editor { }) }) } else if !definitions.is_empty() { - let replica_id = self.replica_id(cx); cx.spawn(|editor, mut cx| async move { let (title, location_tasks, workspace) = editor .update(&mut cx, |editor, cx| { @@ -9663,9 +9655,7 @@ impl Editor { }; let opened = workspace .update(&mut cx, |workspace, cx| { - Self::open_locations_in_multibuffer( - workspace, locations, replica_id, title, split, cx, - ) + Self::open_locations_in_multibuffer(workspace, locations, title, split, cx) }) .ok(); @@ -9762,7 +9752,6 @@ impl Editor { } let (buffer, head) = multi_buffer.text_anchor_for_position(head, cx)?; - let replica_id = self.replica_id(cx); let workspace = self.workspace()?; let project = workspace.read(cx).project().clone(); let references = project.update(cx, |project, cx| project.references(&buffer, head, cx)); @@ -9803,9 +9792,7 @@ impl Editor { ) }) .unwrap(); - Self::open_locations_in_multibuffer( - workspace, locations, replica_id, title, false, cx, - ); + Self::open_locations_in_multibuffer(workspace, locations, title, false, cx); Navigated::Yes }) })) @@ -9815,7 +9802,6 @@ impl Editor { pub fn open_locations_in_multibuffer( workspace: &mut Workspace, mut locations: Vec, - replica_id: ReplicaId, title: String, split: bool, cx: &mut ViewContext, @@ -9827,7 +9813,7 @@ impl Editor { let capability = workspace.project().read(cx).capability(); let excerpt_buffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(replica_id, capability); + let mut multibuffer = MultiBuffer::new(capability); while let Some(location) = locations.next() { let buffer = location.buffer.read(cx); let mut ranges_for_buffer = Vec::new(); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 
e11b38ba59680d..589673447d7f48 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -2822,7 +2822,7 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) { Buffer::local("const c: usize = 3;\n", cx).with_language(rust_language, cx) }); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( toml_buffer.clone(), [ExcerptRange { @@ -6671,7 +6671,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) { .unwrap(); let multi_buffer = cx.new_model(|cx| { - let mut multi_buffer = MultiBuffer::new(0, ReadWrite); + let mut multi_buffer = MultiBuffer::new(ReadWrite); multi_buffer.push_excerpts( buffer_1.clone(), [ @@ -8614,7 +8614,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer.clone(), [ @@ -8698,7 +8698,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { }); let buffer = cx.new_model(|cx| Buffer::local(initial_text, cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts(buffer, excerpt_ranges, cx); multibuffer }); @@ -8757,7 +8757,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); let mut excerpt1_id = None; let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); excerpt1_id = multibuffer .push_excerpts( buffer.clone(), @@ -8842,7 +8842,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut 
TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); let mut excerpt1_id = None; let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); excerpt1_id = multibuffer .push_excerpts( buffer.clone(), @@ -9230,7 +9230,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) { let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); let leader = pane.update(cx, |_, cx| { - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(ReadWrite)); cx.new_view(|cx| build_editor(multibuffer.clone(), cx)) }); @@ -10685,7 +10685,7 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) { diff_every_buffer_row(&buffer_3, sample_text_3.clone(), cols, cx); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ @@ -10825,7 +10825,7 @@ async fn test_mutlibuffer_in_navigation_history(cx: &mut gpui::TestAppContext) { let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text_3.clone(), cx)); let multi_buffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ @@ -11764,7 +11764,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) }); let multi_buffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index 665c649e6e8941..63b083faa89bd5 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -195,7 +195,7 @@ mod tests { 
cx.background_executor.run_until_parked(); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 5dc73634bda774..361ea6246e308b 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -764,7 +764,7 @@ fn editor_with_deleted_text( let parent_editor = cx.view().downgrade(); let editor = cx.new_view(|cx| { let multi_buffer = - cx.new_model(|_| MultiBuffer::without_headers(0, language::Capability::ReadOnly)); + cx.new_model(|_| MultiBuffer::without_headers(language::Capability::ReadOnly)); multi_buffer.update(cx, |multi_buffer, cx| { multi_buffer.push_excerpts( diff_base_buffer, diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index 24ccf64c4ca0b2..ca2db70a70c2ad 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -2607,7 +2607,7 @@ pub mod tests { .await .unwrap(); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ @@ -2957,7 +2957,7 @@ pub mod tests { }) .await .unwrap(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| { let buffer_1_excerpts = multibuffer.push_excerpts( buffer_1.clone(), diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 3d04eb82d38e39..1d301f2ee68cd6 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -68,7 +68,6 @@ impl FollowableItem for Editor { unreachable!() }; - let replica_id = 
project.read(cx).replica_id(); let buffer_ids = state .excerpts .iter() @@ -92,7 +91,7 @@ impl FollowableItem for Editor { if state.singleton && buffers.len() == 1 { multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx) } else { - multibuffer = MultiBuffer::new(replica_id, project.read(cx).capability()); + multibuffer = MultiBuffer::new(project.read(cx).capability()); let mut excerpts = state.excerpts.into_iter().peekable(); while let Some(excerpt) = excerpts.peek() { let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else { diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index a9f27d53a62ef0..19e2a4ea95a676 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -928,7 +928,7 @@ mod tests { let buffer = cx.new_model(|cx| Buffer::local("abc\ndefg\nhijkl\nmn", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.push_excerpts( buffer.clone(), [ diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 6f8a495895fff9..3e4ef174d422ae 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -75,7 +75,7 @@ impl EditorTestContext { cx: &mut gpui::TestAppContext, excerpts: [&str; COUNT], ) -> EditorTestContext { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); let buffer = cx.new_model(|cx| { for excerpt in excerpts.into_iter() { let (text, ranges) = marked_text_ranges(excerpt, false); diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index c163dbc07a6407..f6a61f562a71fc 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -67,7 +67,6 @@ pub struct MultiBuffer { 
subscriptions: Topic, /// If true, the multi-buffer only contains a single [`Buffer`] and a single [`Excerpt`] singleton: bool, - replica_id: ReplicaId, history: History, title: Option, capability: Capability, @@ -350,7 +349,7 @@ impl std::ops::Deref for MultiBufferIndentGuide { } impl MultiBuffer { - pub fn new(replica_id: ReplicaId, capability: Capability) -> Self { + pub fn new(capability: Capability) -> Self { Self { snapshot: RefCell::new(MultiBufferSnapshot { show_headers: true, @@ -360,7 +359,6 @@ impl MultiBuffer { subscriptions: Topic::default(), singleton: false, capability, - replica_id, title: None, history: History { next_transaction_id: clock::Lamport::default(), @@ -372,14 +370,13 @@ impl MultiBuffer { } } - pub fn without_headers(replica_id: ReplicaId, capability: Capability) -> Self { + pub fn without_headers(capability: Capability) -> Self { Self { snapshot: Default::default(), buffers: Default::default(), subscriptions: Default::default(), singleton: false, capability, - replica_id, history: History { next_transaction_id: Default::default(), undo_stack: Default::default(), @@ -414,7 +411,6 @@ impl MultiBuffer { subscriptions: Default::default(), singleton: self.singleton, capability: self.capability, - replica_id: self.replica_id, history: self.history.clone(), title: self.title.clone(), } @@ -430,7 +426,7 @@ impl MultiBuffer { } pub fn singleton(buffer: Model, cx: &mut ModelContext) -> Self { - let mut this = Self::new(buffer.read(cx).replica_id(), buffer.read(cx).capability()); + let mut this = Self::new(buffer.read(cx).capability()); this.singleton = true; this.push_excerpts( buffer, @@ -444,10 +440,6 @@ impl MultiBuffer { this } - pub fn replica_id(&self) -> ReplicaId { - self.replica_id - } - /// Returns an up-to-date snapshot of the MultiBuffer. 
pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { self.sync(cx); @@ -2011,7 +2003,7 @@ impl MultiBuffer { excerpts: [(&str, Vec>); COUNT], cx: &mut gpui::AppContext, ) -> Model { - let multi = cx.new_model(|_| Self::new(0, Capability::ReadWrite)); + let multi = cx.new_model(|_| Self::new(Capability::ReadWrite)); for (text, ranges) in excerpts { let buffer = cx.new_model(|cx| Buffer::local(text, cx)); let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange { @@ -2032,7 +2024,7 @@ impl MultiBuffer { pub fn build_random(rng: &mut impl rand::Rng, cx: &mut gpui::AppContext) -> Model { cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); let mutation_count = rng.gen_range(1..=5); multibuffer.randomly_edit_excerpts(rng, mutation_count, cx); multibuffer @@ -5063,7 +5055,7 @@ mod tests { fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let events = Arc::new(RwLock::new(Vec::::new())); multibuffer.update(cx, |_, cx| { @@ -5306,8 +5298,8 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'm'), cx)); - let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); - let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let follower_edit_event_count = Arc::new(RwLock::new(0)); 
follower_multibuffer.update(cx, |_, cx| { @@ -5410,7 +5402,7 @@ mod tests { #[gpui::test] fn test_expand_excerpts(cx: &mut AppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts_with_context_lines( @@ -5486,7 +5478,7 @@ mod tests { #[gpui::test] fn test_push_excerpts_with_context_lines(cx: &mut AppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts_with_context_lines( buffer.clone(), @@ -5539,7 +5531,7 @@ mod tests { #[gpui::test] async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { let snapshot = buffer.read(cx); let ranges = vec![ @@ -5589,7 +5581,7 @@ mod tests { #[gpui::test] fn test_empty_multibuffer(cx: &mut AppContext) { - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let snapshot = multibuffer.read(cx).snapshot(cx); assert_eq!(snapshot.text(), ""); @@ -5628,7 +5620,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("efghi", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, 
Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ExcerptRange { @@ -5685,7 +5677,7 @@ mod tests { fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); // Create an insertion id in buffer 1 that doesn't exist in buffer 2. // Add an excerpt from buffer 1 that spans this new insertion. @@ -5819,7 +5811,7 @@ mod tests { .unwrap_or(10); let mut buffers: Vec> = Vec::new(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_ids = Vec::::new(); let mut expected_excerpts = Vec::<(Model, Range)>::new(); let mut anchors = Vec::new(); @@ -6283,7 +6275,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("1234", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("5678", cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let group_interval = multibuffer.read(cx).history.group_interval; multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( @@ -6418,7 +6410,7 @@ mod tests { fn test_excerpts_in_ranges_no_ranges(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( 
buffer_1.clone(), @@ -6496,7 +6488,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut expected_excerpt_id = ExcerptId(0); multibuffer.update(cx, |multibuffer, cx| { @@ -6557,7 +6549,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); @@ -6623,7 +6615,7 @@ mod tests { let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'r'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); let mut excerpt_3_id = ExcerptId(0); @@ -6698,7 +6690,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); @@ -6764,7 +6756,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| 
Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); @@ -6829,7 +6821,7 @@ mod tests { fn test_split_ranges(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( buffer_1.clone(), @@ -6885,7 +6877,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'm'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( buffer_1.clone(), diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index c43d4ed4544e4f..fac3c55bf45506 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -176,12 +176,11 @@ pub struct ProjectSearchBar { impl ProjectSearch { pub fn new(project: Model, cx: &mut ModelContext) -> Self { - let replica_id = project.read(cx).replica_id(); let capability = project.read(cx).capability(); Self { project, - excerpts: cx.new_model(|_| MultiBuffer::new(replica_id, capability)), + excerpts: cx.new_model(|_| MultiBuffer::new(capability)), 
pending_search: Default::default(), match_ranges: Default::default(), active_query: None, From 759646e0a35a2c4586817b79028cb347e3749de4 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 11:45:03 -0400 Subject: [PATCH 244/762] editor: Improve rewrapping when working with comments at different indentation levels (#18146) This PR improves the `editor::Rewrap` command when working with comments that were not all at the same indentation level. We now use a heuristic of finding the most common indentation level for each line, using the deepest indent in the event of a tie. It also removes an `.unwrap()` that would previously lead to a panic in this case. Instead of unwrapping we now log an error to the logs and skip rewrapping for that selection. Release Notes: - Improved the behavior of `editor: rewrap` when working with a selection that contained comments at different indentation levels. --- crates/editor/src/editor.rs | 46 ++++++++++++++++--- crates/editor/src/editor_tests.rs | 74 +++++++++++++++++++++++++++++++ crates/language/src/buffer.rs | 4 +- 3 files changed, 116 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index eb2dafc24dc0f3..33eb51cb0ecb77 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6736,9 +6736,31 @@ impl Editor { } } - let row = selection.head().row; - let indent_size = buffer.indent_size_for_line(MultiBufferRow(row)); - let indent_end = Point::new(row, indent_size.len); + // Since not all lines in the selection may be at the same indent + // level, choose the indent size that is the most common between all + // of the lines. + // + // If there is a tie, we use the deepest indent. 
+ let (indent_size, indent_end) = { + let mut indent_size_occurrences = HashMap::default(); + let mut rows_by_indent_size = HashMap::>::default(); + + for row in start_row..=end_row { + let indent = buffer.indent_size_for_line(MultiBufferRow(row)); + rows_by_indent_size.entry(indent).or_default().push(row); + *indent_size_occurrences.entry(indent).or_insert(0) += 1; + } + + let indent_size = indent_size_occurrences + .into_iter() + .max_by_key(|(indent, count)| (*count, indent.len)) + .map(|(indent, _)| indent) + .unwrap_or_default(); + let row = rows_by_indent_size[&indent_size][0]; + let indent_end = Point::new(row, indent_size.len); + + (indent_size, indent_end) + }; let mut line_prefix = indent_size.chars().collect::(); @@ -6788,10 +6810,22 @@ impl Editor { let start = Point::new(start_row, 0); let end = Point::new(end_row, buffer.line_len(MultiBufferRow(end_row))); let selection_text = buffer.text_for_range(start..end).collect::(); - let unwrapped_text = selection_text + let Some(lines_without_prefixes) = selection_text .lines() - .map(|line| line.strip_prefix(&line_prefix).unwrap()) - .join(" "); + .map(|line| { + line.strip_prefix(&line_prefix) + .or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start())) + .ok_or_else(|| { + anyhow!("line did not start with prefix {line_prefix:?}: {line:?}") + }) + }) + .collect::, _>>() + .log_err() + else { + continue; + }; + + let unwrapped_text = lines_without_prefixes.join(" "); let wrap_column = buffer .settings_at(Point::new(start_row, 0), cx) .preferred_line_length as usize; diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 589673447d7f48..85684db8181333 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4249,6 +4249,80 @@ async fn test_rewrap(cx: &mut TestAppContext) { cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); cx.assert_editor_state(wrapped_text); } + + // Test rewrapping unaligned comments in a selection. 
+ { + let language = Arc::new(Language::new( + LanguageConfig { + line_comments: vec!["// ".into(), "/// ".into()], + ..LanguageConfig::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + let unwrapped_text = indoc! {" + fn foo() { + if true { + « // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit purus, a ornare lacus gravida vitae. + // Praesent semper egestas tellus id dignissim.ˇ» + do_something(); + } else { + // + } + + } + "}; + + let wrapped_text = indoc! {" + fn foo() { + if true { + // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus + // mollis elit purus, a ornare lacus gravida vitae. Praesent semper + // egestas tellus id dignissim.ˇ + do_something(); + } else { + // + } + + } + "}; + + cx.set_state(unwrapped_text); + cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); + cx.assert_editor_state(wrapped_text); + + let unwrapped_text = indoc! {" + fn foo() { + if true { + «ˇ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit purus, a ornare lacus gravida vitae. + // Praesent semper egestas tellus id dignissim.» + do_something(); + } else { + // + } + + } + "}; + + let wrapped_text = indoc! {" + fn foo() { + if true { + // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus + // mollis elit purus, a ornare lacus gravida vitae. Praesent semper + // egestas tellus id dignissim.ˇ + do_something(); + } else { + // + } + + } + "}; + + cx.set_state(unwrapped_text); + cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); + cx.assert_editor_state(wrapped_text); + } } #[gpui::test] diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 08fc1ccdb45d5b..acb57273e30eed 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -144,7 +144,7 @@ pub struct BufferSnapshot { /// The kind and amount of indentation in a particular line. 
For now, /// assumes that indentation is all the same character. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] pub struct IndentSize { /// The number of bytes that comprise the indentation. pub len: u32, @@ -153,7 +153,7 @@ pub struct IndentSize { } /// A whitespace character that's used for indentation. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] pub enum IndentKind { /// An ASCII space character. #[default] From f8195c41e0019b77a56a2eb96c346b601a6c8b89 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 20 Sep 2024 11:52:57 -0400 Subject: [PATCH 245/762] docs: Switch proxy example to socks5h not socks5 (#18142) Very rarely when you have a SOCKS proxy configured do you want local DNS. `socks5` does local DNS. `socks5h` does remote DNS. --- assets/settings/default.json | 2 +- docs/src/configuring-zed.md | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index a9e18652580be3..537ad120829b59 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1029,7 +1029,7 @@ // environment variables. // // Examples: - // - "proxy": "socks5://localhost:10808" + // - "proxy": "socks5h://localhost:10808" // - "proxy": "http://127.0.0.1:10809" "proxy": null, // Set to configure aliases for the command palette. 
diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 1befa7d93abb7e..de7433bf5dbad3 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1127,10 +1127,10 @@ The following URI schemes are supported: - `http` - `https` -- `socks4` -- `socks4a` -- `socks5` -- `socks5h` +- `socks4` - SOCKS4 proxy with local DNS +- `socks4a` - SOCKS4 proxy with remote DNS +- `socks5` - SOCKS5 proxy with local DNS +- `socks5h` - SOCKS5 proxy with remote DNS `http` will be used when no scheme is specified. @@ -1148,7 +1148,7 @@ Or to set a `socks5` proxy: ```json { - "proxy": "socks5://localhost:10808" + "proxy": "socks5h://localhost:10808" } ``` From 99bef273009a62b416300daa22b9a14910b5ca91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8B=90=E7=8B=B8?= <134658521+Huliiiiii@users.noreply.github.com> Date: Sat, 21 Sep 2024 00:20:14 +0800 Subject: [PATCH 246/762] Add escape string highlights to JSON and JSONC files (#18138) Release Notes: - Added escape string highlights to JSON and JSONC files --- crates/languages/src/json/highlights.scm | 1 + crates/languages/src/jsonc/highlights.scm | 1 + 2 files changed, 2 insertions(+) diff --git a/crates/languages/src/json/highlights.scm b/crates/languages/src/json/highlights.scm index 71168051094a39..8cf7a6d20dc6c7 100644 --- a/crates/languages/src/json/highlights.scm +++ b/crates/languages/src/json/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string +(escape_sequence) @string.escape (pair key: (string) @property.json_key) diff --git a/crates/languages/src/jsonc/highlights.scm b/crates/languages/src/jsonc/highlights.scm index 71168051094a39..8cf7a6d20dc6c7 100644 --- a/crates/languages/src/jsonc/highlights.scm +++ b/crates/languages/src/jsonc/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string +(escape_sequence) @string.escape (pair key: (string) @property.json_key) From d97427f69eb46b62b4decac7ee88f5890a8a575c Mon Sep 17 00:00:00 2001 From: jvmncs 
<7891333+jvmncs@users.noreply.github.com> Date: Fri, 20 Sep 2024 12:48:48 -0400 Subject: [PATCH 247/762] chore: Update flake inputs (#18150) Release Notes: - N/A --- flake.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/flake.lock b/flake.lock index 2b421a9efb8b99..a5b7a7a6ae9c46 100644 --- a/flake.lock +++ b/flake.lock @@ -23,11 +23,11 @@ "rust-analyzer-src": "rust-analyzer-src" }, "locked": { - "lastModified": 1726554553, - "narHash": "sha256-xakDhIS1c1VgJc/NMOLj05yBsTdlXKMEYz6wC8Hdshc=", + "lastModified": 1726813972, + "narHash": "sha256-t6turZgoSAVgj7hn5mxzNlLOeVeZvymFo8+ymB52q34=", "owner": "nix-community", "repo": "fenix", - "rev": "1f59d7585aa06d2c327960d397bea4067d8fee98", + "rev": "251caeafc75b710282ee7e375800f75f4c8c5727", "type": "github" }, "original": { @@ -53,11 +53,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1726463316, - "narHash": "sha256-gI9kkaH0ZjakJOKrdjaI/VbaMEo9qBbSUl93DnU7f4c=", + "lastModified": 1726642912, + "narHash": "sha256-wiZzKGHRAhItEuoE599Wm3ic+Lg/NykuBvhb+awf7N8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "99dc8785f6a0adac95f5e2ab05cc2e1bf666d172", + "rev": "395c52d142ec1df377acd67db6d4a22950b02a98", "type": "github" }, "original": { From 9f6ff29a54aeeb1fac22e3d5315d47705d47cb31 Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Fri, 20 Sep 2024 12:57:35 -0400 Subject: [PATCH 248/762] Reuse OpenAI low_speed_timeout setting for zed.dev provider (#18144) Release Notes: - N/A --- Cargo.lock | 1 + crates/language_model/Cargo.toml | 1 + crates/language_model/src/provider/cloud.rs | 22 +++++++++++++++++++-- crates/language_model/src/settings.rs | 9 +++++++++ 4 files changed, 31 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 26b8847041ba60..a19506829eeabf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6285,6 +6285,7 @@ dependencies = [ "http_client", "image", "inline_completion_button", + "isahc", "language", "log", "menu", diff 
--git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index ef273ac44fca39..b63428c544369b 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -32,6 +32,7 @@ futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true http_client.workspace = true +isahc.workspace = true inline_completion_button.workspace = true log.workspace = true menu.workspace = true diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index f8f64ff3b84988..58efb4cfe1b308 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -19,6 +19,7 @@ use gpui::{ Subscription, Task, }; use http_client::{AsyncBody, HttpClient, Method, Response}; +use isahc::config::Configurable; use schemars::JsonSchema; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde_json::value::RawValue; @@ -27,6 +28,7 @@ use smol::{ io::{AsyncReadExt, BufReader}, lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}, }; +use std::time::Duration; use std::{ future, sync::{Arc, LazyLock}, @@ -56,6 +58,7 @@ fn zed_cloud_provider_additional_models() -> &'static [AvailableModel] { #[derive(Default, Clone, Debug, PartialEq)] pub struct ZedDotDevSettings { pub available_models: Vec, + pub low_speed_timeout: Option, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] @@ -380,6 +383,7 @@ impl CloudLanguageModel { client: Arc, llm_api_token: LlmApiToken, body: PerformCompletionParams, + low_speed_timeout: Option, ) -> Result> { let http_client = &client.http_client(); @@ -387,7 +391,11 @@ impl CloudLanguageModel { let mut did_retry = false; let response = loop { - let request = http_client::Request::builder() + let mut request_builder = http_client::Request::builder(); + if let Some(low_speed_timeout) = low_speed_timeout { + request_builder = request_builder.low_speed_timeout(100, 
low_speed_timeout); + }; + let request = request_builder .method(Method::POST) .uri(http_client.build_zed_llm_url("/completion", &[])?.as_ref()) .header("Content-Type", "application/json") @@ -501,8 +509,11 @@ impl LanguageModel for CloudLanguageModel { fn stream_completion( &self, request: LanguageModelRequest, - _cx: &AsyncAppContext, + cx: &AsyncAppContext, ) -> BoxFuture<'static, Result>>> { + let openai_low_speed_timeout = + AllLanguageModelSettings::try_read_global(cx, |s| s.openai.low_speed_timeout.unwrap()); + match &self.model { CloudModel::Anthropic(model) => { let request = request.into_anthropic(model.id().into(), model.max_output_tokens()); @@ -519,6 +530,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; Ok(map_to_language_model_completion_events(Box::pin( @@ -542,6 +554,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + openai_low_speed_timeout, ) .await?; Ok(open_ai::extract_text_from_events(response_lines(response))) @@ -569,6 +582,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; Ok(google_ai::extract_text_from_events(response_lines( @@ -599,6 +613,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; Ok(open_ai::extract_text_from_events(response_lines(response))) @@ -650,6 +665,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; @@ -694,6 +710,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; @@ -741,6 +758,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 80749c0bdb3736..8888d51e11c255 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -231,6 +231,7 @@ pub struct GoogleSettingsContent { #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] pub 
struct ZedDotDevSettingsContent { available_models: Option>, + pub low_speed_timeout_in_seconds: Option, } #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] @@ -333,6 +334,14 @@ impl settings::Settings for AllLanguageModelSettings { .as_ref() .and_then(|s| s.available_models.clone()), ); + if let Some(low_speed_timeout_in_seconds) = value + .zed_dot_dev + .as_ref() + .and_then(|s| s.low_speed_timeout_in_seconds) + { + settings.zed_dot_dev.low_speed_timeout = + Some(Duration::from_secs(low_speed_timeout_in_seconds)); + } merge( &mut settings.google.api_url, From 8bd624b5db035862ecb89a4cf126167f572712af Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 13:06:43 -0400 Subject: [PATCH 249/762] editor: Remove unneeded blank lines in rewrap test cases (#18152) This PR removes some unneeded blank lines from some of the test cases for `editor::Rewrap`. These weren't meaningful to the test, and their presence could be confusing. Release Notes: - N/A --- crates/editor/src/editor_tests.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 85684db8181333..5927c22cb08439 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4270,7 +4270,6 @@ async fn test_rewrap(cx: &mut TestAppContext) { } else { // } - } "}; @@ -4284,7 +4283,6 @@ async fn test_rewrap(cx: &mut TestAppContext) { } else { // } - } "}; From 601090511bde0cd39985f670d7d2acc895f2594c Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Fri, 20 Sep 2024 13:25:06 -0400 Subject: [PATCH 250/762] Remove `system_id` from all events but `editor_events` (#18154) Release Notes: - N/A --- crates/collab/src/api/events.rs | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 008c76e048b9d9..f8ae53201304fb 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -905,7 +905,6 @@ impl AssistantEventRow { #[derive(Debug, clickhouse::Row, Serialize)] pub struct CpuEventRow { - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -944,7 +943,6 @@ impl CpuEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -968,7 +966,6 @@ pub struct MemoryEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1000,7 +997,6 @@ impl MemoryEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1024,7 +1020,6 @@ pub struct AppEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1055,7 +1050,6 @@ impl AppEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: 
body.is_staff, @@ -1078,7 +1072,6 @@ pub struct SettingEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1109,7 +1102,6 @@ impl SettingEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1133,7 +1125,6 @@ pub struct ExtensionEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1169,7 +1160,6 @@ impl ExtensionEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1260,7 +1250,6 @@ pub struct EditEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, // Note: This column name has a typo in the ClickHouse table. 
#[serde(rename = "sesssion_id")] @@ -1298,7 +1287,6 @@ impl EditEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, From 5d12e3ce3a318577ff09811bdf57c91674b1beea Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 20 Sep 2024 14:43:26 -0400 Subject: [PATCH 251/762] preview tabs: Toggle preview tab when saving (#18158) Release Notes: - Saving a preview tab will now mark it as a permanent tab --- crates/workspace/src/pane.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index a5f83f961f0400..82300690e7dbe1 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1595,8 +1595,13 @@ impl Pane { } if can_save { - pane.update(cx, |_, cx| item.save(should_format, project, cx))? - .await?; + pane.update(cx, |pane, cx| { + if pane.is_active_preview_item(item.item_id()) { + pane.set_preview_item_id(None, cx); + } + item.save(should_format, project, cx) + })? + .await?; } else if can_save_as { let abs_path = pane.update(cx, |pane, cx| { pane.workspace From 7dac5594cdb02259c455cee90f57fb610b8c6162 Mon Sep 17 00:00:00 2001 From: Daste Date: Fri, 20 Sep 2024 20:44:13 +0200 Subject: [PATCH 252/762] file_finder: Display file icons (#18091) This PR adds file icons (like in tabs, the project panel and tab switcher) to the file finder popup. It's similar to [tab_switcher icons](https://github.com/zed-industries/zed/pull/17115), but simpler, because we're only dealing with actual files. Release Notes: - Added icons to the file finder. 
Screenshot: ![image](https://github.com/user-attachments/assets/bd6a54c1-cdbd-415a-9a82-0cc7a0bb6ca2) --------- Co-authored-by: Marshall Bowers --- Cargo.lock | 3 +++ assets/settings/default.json | 5 ++++ crates/file_finder/Cargo.toml | 3 +++ crates/file_finder/src/file_finder.rs | 21 +++++++++++++-- .../file_finder/src/file_finder_settings.rs | 27 +++++++++++++++++++ 5 files changed, 57 insertions(+), 2 deletions(-) create mode 100644 crates/file_finder/src/file_finder_settings.rs diff --git a/Cargo.lock b/Cargo.lock index a19506829eeabf..dd07dfa1cf0843 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4326,6 +4326,7 @@ dependencies = [ "ctor", "editor", "env_logger", + "file_icons", "futures 0.3.30", "fuzzy", "gpui", @@ -4333,7 +4334,9 @@ dependencies = [ "menu", "picker", "project", + "schemars", "serde", + "serde_derive", "serde_json", "settings", "text", diff --git a/assets/settings/default.json b/assets/settings/default.json index 537ad120829b59..8424c5733d81bc 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -496,6 +496,11 @@ // Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. "enable_preview_from_code_navigation": false }, + // Settings related to the file finder. + "file_finder": { + // Whether to show file icons in the file finder. + "file_icons": true + }, // Whether or not to remove any trailing whitespace from lines of a buffer // before saving it. 
"remove_trailing_whitespace_on_save": true, diff --git a/crates/file_finder/Cargo.toml b/crates/file_finder/Cargo.toml index 8f17b191a53073..2b4aa5fe3080cf 100644 --- a/crates/file_finder/Cargo.toml +++ b/crates/file_finder/Cargo.toml @@ -16,14 +16,17 @@ doctest = false anyhow.workspace = true collections.workspace = true editor.workspace = true +file_icons.workspace = true futures.workspace = true fuzzy.workspace = true gpui.workspace = true menu.workspace = true picker.workspace = true project.workspace = true +schemars.workspace = true settings.workspace = true serde.workspace = true +serde_derive.workspace = true text.workspace = true theme.workspace = true ui.workspace = true diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 50a14b62dbb4d1..e1e0998f8aa3b6 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -1,11 +1,14 @@ #[cfg(test)] mod file_finder_tests; +mod file_finder_settings; mod new_path_prompt; mod open_path_prompt; use collections::HashMap; use editor::{scroll::Autoscroll, Bias, Editor}; +use file_finder_settings::FileFinderSettings; +use file_icons::FileIcons; use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use gpui::{ actions, rems, Action, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle, @@ -39,7 +42,12 @@ pub struct FileFinder { init_modifiers: Option, } +pub fn init_settings(cx: &mut AppContext) { + FileFinderSettings::register(cx); +} + pub fn init(cx: &mut AppContext) { + init_settings(cx); cx.observe_new_views(FileFinder::register).detach(); cx.observe_new_views(NewPathPrompt::register).detach(); cx.observe_new_views(OpenPathPrompt::register).detach(); @@ -1041,12 +1049,14 @@ impl PickerDelegate for FileFinderDelegate { selected: bool, cx: &mut ViewContext>, ) -> Option { + let settings = FileFinderSettings::get_global(cx); + let path_match = self .matches .get(ix) .expect("Invalid matches state: no element for index {ix}"); - let 
icon = match &path_match { + let history_icon = match &path_match { Match::History { .. } => Icon::new(IconName::HistoryRerun) .color(Color::Muted) .size(IconSize::Small) @@ -1059,10 +1069,17 @@ impl PickerDelegate for FileFinderDelegate { let (file_name, file_name_positions, full_path, full_path_positions) = self.labels_for_match(path_match, cx, ix); + let file_icon = if settings.file_icons { + FileIcons::get_icon(Path::new(&file_name), cx).map(Icon::from_path) + } else { + None + }; + Some( ListItem::new(ix) .spacing(ListItemSpacing::Sparse) - .end_slot::(Some(icon)) + .start_slot::(file_icon) + .end_slot::(history_icon) .inset(true) .selected(selected) .child( diff --git a/crates/file_finder/src/file_finder_settings.rs b/crates/file_finder/src/file_finder_settings.rs new file mode 100644 index 00000000000000..c02008c917b7fb --- /dev/null +++ b/crates/file_finder/src/file_finder_settings.rs @@ -0,0 +1,27 @@ +use anyhow::Result; +use schemars::JsonSchema; +use serde_derive::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +pub struct FileFinderSettings { + pub file_icons: bool, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct FileFinderSettingsContent { + /// Whether to show file icons in the file finder. 
+ /// + /// Default: true + pub file_icons: Option, +} + +impl Settings for FileFinderSettings { + const KEY: Option<&'static str> = Some("file_finder"); + + type FileContent = FileFinderSettingsContent; + + fn load(sources: SettingsSources, _: &mut gpui::AppContext) -> Result { + sources.json_merge() + } +} From 45388805ad4bc5e27c0fcdd6936fb5bce687a8ff Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 20 Sep 2024 13:02:39 -0600 Subject: [PATCH 253/762] vim: gq (#18156) Closes #ISSUE Release Notes: - vim: Added gq/gw for rewrapping lines --- assets/keymaps/vim.json | 13 +++- crates/editor/src/editor.rs | 6 +- crates/vim/src/normal.rs | 30 +++++++- crates/vim/src/rewrap.rs | 114 ++++++++++++++++++++++++++++++ crates/vim/src/state.rs | 3 + crates/vim/src/vim.rs | 2 + crates/vim/test_data/test_gq.json | 12 ++++ 7 files changed, 177 insertions(+), 3 deletions(-) create mode 100644 crates/vim/src/rewrap.rs create mode 100644 crates/vim/test_data/test_gq.json diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 18b38384ef81ce..8d933f19afb1dd 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -124,7 +124,6 @@ "g i": "vim::InsertAtPrevious", "g ,": "vim::ChangeListNewer", "g ;": "vim::ChangeListOlder", - "g q": "editor::Rewrap", "shift-h": "vim::WindowTop", "shift-m": "vim::WindowMiddle", "shift-l": "vim::WindowBottom", @@ -240,6 +239,8 @@ "g shift-u": ["vim::PushOperator", "Uppercase"], "g ~": ["vim::PushOperator", "OppositeCase"], "\"": ["vim::PushOperator", "Register"], + "g q": ["vim::PushOperator", "Rewrap"], + "g w": ["vim::PushOperator", "Rewrap"], "q": "vim::ToggleRecord", "shift-q": "vim::ReplayLastRecording", "@": ["vim::PushOperator", "ReplayRegister"], @@ -301,6 +302,7 @@ "i": ["vim::PushOperator", { "Object": { "around": false } }], "a": ["vim::PushOperator", { "Object": { "around": true } }], "g c": "vim::ToggleComments", + "g q": "vim::Rewrap", "\"": ["vim::PushOperator", "Register"], // tree-sitter related commands "[ 
x": "editor::SelectLargerSyntaxNode", @@ -428,6 +430,15 @@ "~": "vim::CurrentLine" } }, + { + "context": "vim_operator == gq", + "bindings": { + "g q": "vim::CurrentLine", + "q": "vim::CurrentLine", + "g w": "vim::CurrentLine", + "w": "vim::CurrentLine" + } + }, { "context": "vim_operator == y", "bindings": { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 33eb51cb0ecb77..1f4a9376d22fa5 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6705,6 +6705,10 @@ impl Editor { } pub fn rewrap(&mut self, _: &Rewrap, cx: &mut ViewContext) { + self.rewrap_impl(true, cx) + } + + pub fn rewrap_impl(&mut self, only_text: bool, cx: &mut ViewContext) { let buffer = self.buffer.read(cx).snapshot(cx); let selections = self.selections.all::(cx); let mut selections = selections.iter().peekable(); @@ -6725,7 +6729,7 @@ impl Editor { continue; } - let mut should_rewrap = false; + let mut should_rewrap = !only_text; if let Some(language_scope) = buffer.language_scope_at(selection.head()) { match language_scope.language_name().0.as_ref() { diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 741e09f178ff3e..10bf3c8e8d73b4 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -168,6 +168,7 @@ impl Vim { Some(Operator::Yank) => self.yank_motion(motion, times, cx), Some(Operator::AddSurrounds { target: None }) => {} Some(Operator::Indent) => self.indent_motion(motion, times, IndentDirection::In, cx), + Some(Operator::Rewrap) => self.rewrap_motion(motion, times, cx), Some(Operator::Outdent) => self.indent_motion(motion, times, IndentDirection::Out, cx), Some(Operator::Lowercase) => { self.change_case_motion(motion, times, CaseTarget::Lowercase, cx) @@ -199,6 +200,7 @@ impl Vim { Some(Operator::Outdent) => { self.indent_object(object, around, IndentDirection::Out, cx) } + Some(Operator::Rewrap) => self.rewrap_object(object, around, cx), Some(Operator::Lowercase) => { self.change_case_object(object, 
around, CaseTarget::Lowercase, cx) } @@ -478,8 +480,9 @@ impl Vim { } #[cfg(test)] mod test { - use gpui::{KeyBinding, TestAppContext}; + use gpui::{KeyBinding, TestAppContext, UpdateGlobal}; use indoc::indoc; + use language::language_settings::AllLanguageSettings; use settings::SettingsStore; use crate::{ @@ -1386,4 +1389,29 @@ mod test { cx.simulate_shared_keystrokes("2 0 r - ").await; cx.shared_state().await.assert_eq("ˇhello world\n"); } + + #[gpui::test] + async fn test_gq(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_neovim_option("textwidth=5").await; + + cx.update(|cx| { + SettingsStore::update_global(cx, |settings, cx| { + settings.update_user_settings::(cx, |settings| { + settings.defaults.preferred_line_length = Some(5); + }); + }) + }); + + cx.set_shared_state("ˇth th th th th th\n").await; + cx.simulate_shared_keystrokes("g q q").await; + cx.shared_state().await.assert_eq("th th\nth th\nˇth th\n"); + + cx.set_shared_state("ˇth th th th th th\nth th th th th th\n") + .await; + cx.simulate_shared_keystrokes("v j g q").await; + cx.shared_state() + .await + .assert_eq("th th\nth th\nth th\nth th\nth th\nˇth th\n"); + } } diff --git a/crates/vim/src/rewrap.rs b/crates/vim/src/rewrap.rs new file mode 100644 index 00000000000000..3e61b3c3a181b3 --- /dev/null +++ b/crates/vim/src/rewrap.rs @@ -0,0 +1,114 @@ +use crate::{motion::Motion, object::Object, state::Mode, Vim}; +use collections::HashMap; +use editor::{display_map::ToDisplayPoint, scroll::Autoscroll, Bias, Editor}; +use gpui::actions; +use language::SelectionGoal; +use ui::ViewContext; + +actions!(vim, [Rewrap]); + +pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { + Vim::action(editor, cx, |vim, _: &Rewrap, cx| { + vim.record_current_action(cx); + vim.take_count(cx); + vim.store_visual_marks(cx); + vim.update_editor(cx, |vim, editor, cx| { + editor.transact(cx, |editor, cx| { + let mut positions = vim.save_selection_starts(editor, 
cx); + editor.rewrap_impl(false, cx); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + if let Some(anchor) = positions.remove(&selection.id) { + let mut point = anchor.to_display_point(map); + *point.column_mut() = 0; + selection.collapse_to(point, SelectionGoal::None); + } + }); + }); + }); + }); + if vim.mode.is_visual() { + vim.switch_mode(Mode::Normal, true, cx) + } + }); +} + +impl Vim { + pub(crate) fn rewrap_motion( + &mut self, + motion: Motion, + times: Option, + cx: &mut ViewContext, + ) { + self.stop_recording(cx); + self.update_editor(cx, |_, editor, cx| { + let text_layout_details = editor.text_layout_details(cx); + editor.transact(cx, |editor, cx| { + let mut selection_starts: HashMap<_, _> = Default::default(); + editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = map.display_point_to_anchor(selection.head(), Bias::Right); + selection_starts.insert(selection.id, anchor); + motion.expand_selection(map, selection, times, false, &text_layout_details); + }); + }); + editor.rewrap_impl(false, cx); + editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = selection_starts.remove(&selection.id).unwrap(); + let mut point = anchor.to_display_point(map); + *point.column_mut() = 0; + selection.collapse_to(point, SelectionGoal::None); + }); + }); + }); + }); + } + + pub(crate) fn rewrap_object( + &mut self, + object: Object, + around: bool, + cx: &mut ViewContext, + ) { + self.stop_recording(cx); + self.update_editor(cx, |_, editor, cx| { + editor.transact(cx, |editor, cx| { + let mut original_positions: HashMap<_, _> = Default::default(); + editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = map.display_point_to_anchor(selection.head(), Bias::Right); + original_positions.insert(selection.id, anchor); + object.expand_selection(map, selection, around); + }); + }); + editor.rewrap_impl(false, cx); + 
editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = original_positions.remove(&selection.id).unwrap(); + let mut point = anchor.to_display_point(map); + *point.column_mut() = 0; + selection.collapse_to(point, SelectionGoal::None); + }); + }); + }); + }); + } +} + +#[cfg(test)] +mod test { + use crate::test::NeovimBackedTestContext; + + #[gpui::test] + async fn test_indent_gv(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_neovim_option("shiftwidth=4").await; + + cx.set_shared_state("ˇhello\nworld\n").await; + cx.simulate_shared_keystrokes("v j > g v").await; + cx.shared_state() + .await + .assert_eq("« hello\n ˇ» world\n"); + } +} diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 1d642e990f83bd..b61cb405e1104e 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -72,6 +72,7 @@ pub enum Operator { Jump { line: bool }, Indent, Outdent, + Rewrap, Lowercase, Uppercase, OppositeCase, @@ -454,6 +455,7 @@ impl Operator { Operator::Jump { line: true } => "'", Operator::Jump { line: false } => "`", Operator::Indent => ">", + Operator::Rewrap => "gq", Operator::Outdent => "<", Operator::Uppercase => "gU", Operator::Lowercase => "gu", @@ -482,6 +484,7 @@ impl Operator { Operator::Change | Operator::Delete | Operator::Yank + | Operator::Rewrap | Operator::Indent | Operator::Outdent | Operator::Lowercase diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index a4b77b1a7a3ef8..701972c19bb614 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -13,6 +13,7 @@ mod motion; mod normal; mod object; mod replace; +mod rewrap; mod state; mod surrounds; mod visual; @@ -291,6 +292,7 @@ impl Vim { command::register(editor, cx); replace::register(editor, cx); indent::register(editor, cx); + rewrap::register(editor, cx); object::register(editor, cx); visual::register(editor, cx); change_list::register(editor, cx); diff --git 
a/crates/vim/test_data/test_gq.json b/crates/vim/test_data/test_gq.json new file mode 100644 index 00000000000000..08cdb1231541e4 --- /dev/null +++ b/crates/vim/test_data/test_gq.json @@ -0,0 +1,12 @@ +{"SetOption":{"value":"textwidth=5"}} +{"Put":{"state":"ˇth th th th th th\n"}} +{"Key":"g"} +{"Key":"q"} +{"Key":"q"} +{"Get":{"state":"th th\nth th\nˇth th\n","mode":"Normal"}} +{"Put":{"state":"ˇth th th th th th\nth th th th th th\n"}} +{"Key":"v"} +{"Key":"j"} +{"Key":"g"} +{"Key":"q"} +{"Get":{"state":"th th\nth th\nth th\nth th\nth th\nˇth th\n","mode":"Normal"}} From 7d62fda5a38d1199e79c30177828dfac2a1ce4b3 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Sat, 21 Sep 2024 03:49:40 +0800 Subject: [PATCH 254/762] file_finder: Notify user when picker an non-utf8 file (#18136) notify user when using file finder picker an file which cannot open. Release Notes: - N/A --- crates/file_finder/src/file_finder.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index e1e0998f8aa3b6..4c3f92d3c156a0 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -31,7 +31,7 @@ use std::{ use text::Point; use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; use util::{paths::PathWithPosition, post_inc, ResultExt}; -use workspace::{item::PreviewTabsSettings, ModalView, Workspace}; +use workspace::{item::PreviewTabsSettings, notifications::NotifyResultExt, ModalView, Workspace}; actions!(file_finder, [SelectPrev]); @@ -1011,7 +1011,7 @@ impl PickerDelegate for FileFinderDelegate { let finder = self.file_finder.clone(); cx.spawn(|_, mut cx| async move { - let item = open_task.await.log_err()?; + let item = open_task.await.notify_async_err(&mut cx)?; if let Some(row) = row { if let Some(active_editor) = item.downcast::() { active_editor From 5905fbb9accdc5d34b7fec0fe021022a5b38420e Mon Sep 17 00:00:00 2001 From: Roy Williams 
Date: Fri, 20 Sep 2024 16:59:12 -0400 Subject: [PATCH 255/762] Allow Anthropic custom models to override temperature (#18160) Release Notes: - Allow Anthropic custom models to override "temperature" This also centralized the defaulting of "temperature" to be inside of each model's `into_x` call instead of being sprinkled around the code. --- crates/anthropic/src/anthropic.rs | 14 ++++++++++++++ crates/assistant/src/context.rs | 2 +- crates/assistant/src/inline_assistant.rs | 2 +- crates/assistant/src/prompt_library.rs | 2 +- .../assistant/src/slash_command/auto_command.rs | 2 +- .../assistant/src/terminal_inline_assistant.rs | 2 +- crates/language_model/src/provider/anthropic.rs | 10 ++++++++-- crates/language_model/src/provider/cloud.rs | 16 +++++++++++++--- crates/language_model/src/provider/ollama.rs | 2 +- crates/language_model/src/request.rs | 15 ++++++++++----- crates/language_model/src/settings.rs | 2 ++ crates/semantic_index/src/summary_index.rs | 2 +- 12 files changed, 54 insertions(+), 17 deletions(-) diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index f960dc541a2866..91b6723e90be97 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -49,6 +49,7 @@ pub enum Model { /// Indicates whether this custom model supports caching. cache_configuration: Option, max_output_tokens: Option, + default_temperature: Option, }, } @@ -124,6 +125,19 @@ impl Model { } } + pub fn default_temperature(&self) -> f32 { + match self { + Self::Claude3_5Sonnet + | Self::Claude3Opus + | Self::Claude3Sonnet + | Self::Claude3Haiku => 1.0, + Self::Custom { + default_temperature, + .. 
+ } => default_temperature.unwrap_or(1.0), + } + } + pub fn tool_model_id(&self) -> &str { if let Self::Custom { tool_override: Some(tool_override), diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 830c0980491f7c..97a5b3ea988bcc 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -2180,7 +2180,7 @@ impl Context { messages: Vec::new(), tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }; for message in self.messages(cx) { if message.status != MessageStatus::Done { diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index d95b54d3c6a6ac..f2428c3a2e94cf 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -2732,7 +2732,7 @@ impl CodegenAlternative { messages, tools: Vec::new(), stop: Vec::new(), - temperature: 1., + temperature: None, }) } diff --git a/crates/assistant/src/prompt_library.rs b/crates/assistant/src/prompt_library.rs index 76ee95d5070b82..24e20a18a799a6 100644 --- a/crates/assistant/src/prompt_library.rs +++ b/crates/assistant/src/prompt_library.rs @@ -796,7 +796,7 @@ impl PromptLibrary { }], tools: Vec::new(), stop: Vec::new(), - temperature: 1., + temperature: None, }, cx, ) diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs index e1f20c311bd36e..14cee296820989 100644 --- a/crates/assistant/src/slash_command/auto_command.rs +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -216,7 +216,7 @@ async fn commands_for_summaries( }], tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }; while let Some(current_summaries) = stack.pop() { diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index caf819bae535ee..e1a26d851003eb 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ 
b/crates/assistant/src/terminal_inline_assistant.rs @@ -284,7 +284,7 @@ impl TerminalInlineAssistant { messages, tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }) } diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index 1e3d2750949f16..86538bec49172d 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -51,6 +51,7 @@ pub struct AvailableModel { /// Configuration of Anthropic's caching API. pub cache_configuration: Option, pub max_output_tokens: Option, + pub default_temperature: Option, } pub struct AnthropicLanguageModelProvider { @@ -200,6 +201,7 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider { } }), max_output_tokens: model.max_output_tokens, + default_temperature: model.default_temperature, }, ); } @@ -375,8 +377,11 @@ impl LanguageModel for AnthropicModel { request: LanguageModelRequest, cx: &AsyncAppContext, ) -> BoxFuture<'static, Result>>> { - let request = - request.into_anthropic(self.model.id().into(), self.model.max_output_tokens()); + let request = request.into_anthropic( + self.model.id().into(), + self.model.default_temperature(), + self.model.max_output_tokens(), + ); let request = self.stream_completion(request, cx); let future = self.request_limiter.stream(async move { let response = request.await.map_err(|err| anyhow!(err))?; @@ -405,6 +410,7 @@ impl LanguageModel for AnthropicModel { ) -> BoxFuture<'static, Result>>> { let mut request = request.into_anthropic( self.model.tool_model_id().into(), + self.model.default_temperature(), self.model.max_output_tokens(), ); request.tool_choice = Some(anthropic::ToolChoice::Tool { diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 58efb4cfe1b308..606a6fbacec7b0 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -87,6 
+87,8 @@ pub struct AvailableModel { pub tool_override: Option, /// Indicates whether this custom model supports caching. pub cache_configuration: Option, + /// The default temperature to use for this model. + pub default_temperature: Option, } pub struct CloudLanguageModelProvider { @@ -255,6 +257,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider { min_total_token: config.min_total_token, } }), + default_temperature: model.default_temperature, max_output_tokens: model.max_output_tokens, }), AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom { @@ -516,7 +519,11 @@ impl LanguageModel for CloudLanguageModel { match &self.model { CloudModel::Anthropic(model) => { - let request = request.into_anthropic(model.id().into(), model.max_output_tokens()); + let request = request.into_anthropic( + model.id().into(), + model.default_temperature(), + model.max_output_tokens(), + ); let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); let future = self.request_limiter.stream(async move { @@ -642,8 +649,11 @@ impl LanguageModel for CloudLanguageModel { match &self.model { CloudModel::Anthropic(model) => { - let mut request = - request.into_anthropic(model.tool_model_id().into(), model.max_output_tokens()); + let mut request = request.into_anthropic( + model.tool_model_id().into(), + model.default_temperature(), + model.max_output_tokens(), + ); request.tool_choice = Some(anthropic::ToolChoice::Tool { name: tool_name.clone(), }); diff --git a/crates/language_model/src/provider/ollama.rs b/crates/language_model/src/provider/ollama.rs index 6a3190dee7c229..a29ff3cf6a7a1a 100644 --- a/crates/language_model/src/provider/ollama.rs +++ b/crates/language_model/src/provider/ollama.rs @@ -235,7 +235,7 @@ impl OllamaLanguageModel { options: Some(ChatOptions { num_ctx: Some(self.model.max_tokens), stop: Some(request.stop), - temperature: Some(request.temperature), + temperature: request.temperature.or(Some(1.0)), 
..Default::default() }), tools: vec![], diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index dd480b8aaf38c2..06dde1862ab37e 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -236,7 +236,7 @@ pub struct LanguageModelRequest { pub messages: Vec, pub tools: Vec, pub stop: Vec, - pub temperature: f32, + pub temperature: Option, } impl LanguageModelRequest { @@ -262,7 +262,7 @@ impl LanguageModelRequest { .collect(), stream, stop: self.stop, - temperature: self.temperature, + temperature: self.temperature.unwrap_or(1.0), max_tokens: max_output_tokens, tools: Vec::new(), tool_choice: None, @@ -290,7 +290,7 @@ impl LanguageModelRequest { candidate_count: Some(1), stop_sequences: Some(self.stop), max_output_tokens: None, - temperature: Some(self.temperature as f64), + temperature: self.temperature.map(|t| t as f64).or(Some(1.0)), top_p: None, top_k: None, }), @@ -298,7 +298,12 @@ impl LanguageModelRequest { } } - pub fn into_anthropic(self, model: String, max_output_tokens: u32) -> anthropic::Request { + pub fn into_anthropic( + self, + model: String, + default_temperature: f32, + max_output_tokens: u32, + ) -> anthropic::Request { let mut new_messages: Vec = Vec::new(); let mut system_message = String::new(); @@ -400,7 +405,7 @@ impl LanguageModelRequest { tool_choice: None, metadata: None, stop_sequences: Vec::new(), - temperature: Some(self.temperature), + temperature: self.temperature.or(Some(default_temperature)), top_k: None, top_p: None, } diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 8888d51e11c255..2bf8deb04238c2 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -99,6 +99,7 @@ impl AnthropicSettingsContent { tool_override, cache_configuration, max_output_tokens, + default_temperature, } => Some(provider::anthropic::AvailableModel { name, display_name, @@ -112,6 +113,7 @@ impl 
AnthropicSettingsContent { }, ), max_output_tokens, + default_temperature, }), _ => None, }) diff --git a/crates/semantic_index/src/summary_index.rs b/crates/semantic_index/src/summary_index.rs index 08f25ae0287fa2..f4c6d4726c508b 100644 --- a/crates/semantic_index/src/summary_index.rs +++ b/crates/semantic_index/src/summary_index.rs @@ -562,7 +562,7 @@ impl SummaryIndex { }], tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }; let code_len = code.len(); From e309fbda2a95a55a043ad41ead97c568c7aeef19 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 20 Sep 2024 15:09:18 -0700 Subject: [PATCH 256/762] Add a slash command for automatically retrieving relevant context (#17972) * [x] put this slash command behind a feature flag until we release embedding access to the general population * [x] choose a name for this slash command and name the rust module to match Release Notes: - N/A --------- Co-authored-by: Jason Co-authored-by: Richard Co-authored-by: Jason Mancuso <7891333+jvmncs@users.noreply.github.com> Co-authored-by: Richard Feldman --- assets/prompts/project_slash_command.hbs | 8 + crates/assistant/src/assistant.rs | 32 ++- crates/assistant/src/context.rs | 5 +- crates/assistant/src/prompts.rs | 15 + crates/assistant/src/slash_command.rs | 2 +- .../slash_command/cargo_workspace_command.rs | 153 ++++++++++ .../src/slash_command/project_command.rs | 259 +++++++++-------- .../src/slash_command/search_command.rs | 63 +++-- .../assistant/src/slash_command_settings.rs | 10 +- crates/evals/src/eval.rs | 2 +- crates/semantic_index/examples/index.rs | 2 +- crates/semantic_index/src/embedding.rs | 23 +- crates/semantic_index/src/project_index.rs | 61 ++-- crates/semantic_index/src/semantic_index.rs | 265 +++++++++++++++--- 14 files changed, 680 insertions(+), 220 deletions(-) create mode 100644 assets/prompts/project_slash_command.hbs create mode 100644 crates/assistant/src/slash_command/cargo_workspace_command.rs diff --git 
a/assets/prompts/project_slash_command.hbs b/assets/prompts/project_slash_command.hbs new file mode 100644 index 00000000000000..6c63f71d895274 --- /dev/null +++ b/assets/prompts/project_slash_command.hbs @@ -0,0 +1,8 @@ +A software developer is asking a question about their project. The source files in their project have been indexed into a database of semantic text embeddings. +Your task is to generate a list of 4 diverse search queries that can be run on this embedding database, in order to retrieve a list of code snippets +that are relevant to the developer's question. Redundant search queries will be heavily penalized, so only include another query if it's sufficiently +distinct from previous ones. + +Here is the question that's been asked, together with context that the developer has added manually: + +{{{context_buffer}}} diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index 8b9c66ee55848d..9cc63af5a1adee 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -41,9 +41,10 @@ use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; use slash_command::{ - auto_command, context_server_command, default_command, delta_command, diagnostics_command, - docs_command, fetch_command, file_command, now_command, project_command, prompt_command, - search_command, symbols_command, tab_command, terminal_command, workflow_command, + auto_command, cargo_workspace_command, context_server_command, default_command, delta_command, + diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command, + prompt_command, search_command, symbols_command, tab_command, terminal_command, + workflow_command, }; use std::path::PathBuf; use std::sync::Arc; @@ -384,20 +385,33 @@ fn register_slash_commands(prompt_builder: Option>, cx: &mut 
slash_command_registry.register_command(delta_command::DeltaSlashCommand, true); slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true); slash_command_registry.register_command(tab_command::TabSlashCommand, true); - slash_command_registry.register_command(project_command::ProjectSlashCommand, true); + slash_command_registry + .register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true); slash_command_registry.register_command(prompt_command::PromptSlashCommand, true); slash_command_registry.register_command(default_command::DefaultSlashCommand, false); slash_command_registry.register_command(terminal_command::TerminalSlashCommand, true); slash_command_registry.register_command(now_command::NowSlashCommand, false); slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true); + slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); if let Some(prompt_builder) = prompt_builder { slash_command_registry.register_command( workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()), true, ); + cx.observe_flag::({ + let slash_command_registry = slash_command_registry.clone(); + move |is_enabled, _cx| { + if is_enabled { + slash_command_registry.register_command( + project_command::ProjectSlashCommand::new(prompt_builder.clone()), + true, + ); + } + } + }) + .detach(); } - slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); cx.observe_flag::({ let slash_command_registry = slash_command_registry.clone(); @@ -435,10 +449,12 @@ fn update_slash_commands_from_settings(cx: &mut AppContext) { slash_command_registry.unregister_command(docs_command::DocsSlashCommand); } - if settings.project.enabled { - slash_command_registry.register_command(project_command::ProjectSlashCommand, true); + if settings.cargo_workspace.enabled { + slash_command_registry + .register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true); } else { - 
slash_command_registry.unregister_command(project_command::ProjectSlashCommand); + slash_command_registry + .unregister_command(cargo_workspace_command::CargoWorkspaceSlashCommand); } } diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 97a5b3ea988bcc..1cac47831f52dc 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -1967,8 +1967,9 @@ impl Context { } pub fn assist(&mut self, cx: &mut ModelContext) -> Option { - let provider = LanguageModelRegistry::read_global(cx).active_provider()?; - let model = LanguageModelRegistry::read_global(cx).active_model()?; + let model_registry = LanguageModelRegistry::read_global(cx); + let provider = model_registry.active_provider()?; + let model = model_registry.active_model()?; let last_message_id = self.get_last_valid_message_id(cx)?; if !provider.is_authenticated(cx) { diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 3b9f75bac930b6..106935cb882695 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -40,6 +40,11 @@ pub struct TerminalAssistantPromptContext { pub user_prompt: String, } +#[derive(Serialize)] +pub struct ProjectSlashCommandPromptContext { + pub context_buffer: String, +} + /// Context required to generate a workflow step resolution prompt. 
#[derive(Debug, Serialize)] pub struct StepResolutionContext { @@ -317,4 +322,14 @@ impl PromptBuilder { pub fn generate_workflow_prompt(&self) -> Result { self.handlebars.lock().render("edit_workflow", &()) } + + pub fn generate_project_slash_command_prompt( + &self, + context_buffer: String, + ) -> Result { + self.handlebars.lock().render( + "project_slash_command", + &ProjectSlashCommandPromptContext { context_buffer }, + ) + } } diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index cf957a15c67d9d..e430e35622a222 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -18,8 +18,8 @@ use std::{ }; use ui::ActiveTheme; use workspace::Workspace; - pub mod auto_command; +pub mod cargo_workspace_command; pub mod context_server_command; pub mod default_command; pub mod delta_command; diff --git a/crates/assistant/src/slash_command/cargo_workspace_command.rs b/crates/assistant/src/slash_command/cargo_workspace_command.rs new file mode 100644 index 00000000000000..baf16d7f014cb2 --- /dev/null +++ b/crates/assistant/src/slash_command/cargo_workspace_command.rs @@ -0,0 +1,153 @@ +use super::{SlashCommand, SlashCommandOutput}; +use anyhow::{anyhow, Context, Result}; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use fs::Fs; +use gpui::{AppContext, Model, Task, WeakView}; +use language::{BufferSnapshot, LspAdapterDelegate}; +use project::{Project, ProjectPath}; +use std::{ + fmt::Write, + path::Path, + sync::{atomic::AtomicBool, Arc}, +}; +use ui::prelude::*; +use workspace::Workspace; + +pub(crate) struct CargoWorkspaceSlashCommand; + +impl CargoWorkspaceSlashCommand { + async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { + let buffer = fs.load(path_to_cargo_toml).await?; + let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; + + let mut message = String::new(); + writeln!(message, "You are in a Rust project.")?; + + if let 
Some(workspace) = cargo_toml.workspace { + writeln!( + message, + "The project is a Cargo workspace with the following members:" + )?; + for member in workspace.members { + writeln!(message, "- {member}")?; + } + + if !workspace.default_members.is_empty() { + writeln!(message, "The default members are:")?; + for member in workspace.default_members { + writeln!(message, "- {member}")?; + } + } + + if !workspace.dependencies.is_empty() { + writeln!( + message, + "The following workspace dependencies are installed:" + )?; + for dependency in workspace.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } else if let Some(package) = cargo_toml.package { + writeln!( + message, + "The project name is \"{name}\".", + name = package.name + )?; + + let description = package + .description + .as_ref() + .and_then(|description| description.get().ok().cloned()); + if let Some(description) = description.as_ref() { + writeln!(message, "It describes itself as \"{description}\".")?; + } + + if !cargo_toml.dependencies.is_empty() { + writeln!(message, "The following dependencies are installed:")?; + for dependency in cargo_toml.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } + + Ok(message) + } + + fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { + let worktree = project.read(cx).worktrees(cx).next()?; + let worktree = worktree.read(cx); + let entry = worktree.entry_for_path("Cargo.toml")?; + let path = ProjectPath { + worktree_id: worktree.id(), + path: entry.path.clone(), + }; + Some(Arc::from( + project.read(cx).absolute_path(&path, cx)?.as_path(), + )) + } +} + +impl SlashCommand for CargoWorkspaceSlashCommand { + fn name(&self) -> String { + "cargo-workspace".into() + } + + fn description(&self) -> String { + "insert project workspace metadata".into() + } + + fn menu_text(&self) -> String { + "Insert Project Workspace Metadata".into() + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + 
_cancel: Arc, + _workspace: Option>, + _cx: &mut WindowContext, + ) -> Task>> { + Task::ready(Err(anyhow!("this command does not require argument"))) + } + + fn requires_argument(&self) -> bool { + false + } + + fn run( + self: Arc, + _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, + workspace: WeakView, + _delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let output = workspace.update(cx, |workspace, cx| { + let project = workspace.project().clone(); + let fs = workspace.project().read(cx).fs().clone(); + let path = Self::path_to_cargo_toml(project, cx); + let output = cx.background_executor().spawn(async move { + let path = path.with_context(|| "Cargo.toml not found")?; + Self::build_message(fs, &path).await + }); + + cx.foreground_executor().spawn(async move { + let text = output.await?; + let range = 0..text.len(); + Ok(SlashCommandOutput { + text, + sections: vec![SlashCommandOutputSection { + range, + icon: IconName::FileTree, + label: "Project".into(), + metadata: None, + }], + run_commands_in_text: false, + }) + }) + }); + output.unwrap_or_else(|error| Task::ready(Err(error))) + } +} diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 3e8596d942bf61..197e91d91addac 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -1,90 +1,39 @@ -use super::{SlashCommand, SlashCommandOutput}; -use anyhow::{anyhow, Context, Result}; +use super::{ + create_label_for_command, search_command::add_search_result_section, SlashCommand, + SlashCommandOutput, +}; +use crate::PromptBuilder; +use anyhow::{anyhow, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; -use fs::Fs; -use gpui::{AppContext, Model, Task, WeakView}; -use language::{BufferSnapshot, LspAdapterDelegate}; -use project::{Project, 
ProjectPath}; +use feature_flags::FeatureFlag; +use gpui::{AppContext, Task, WeakView, WindowContext}; +use language::{Anchor, CodeLabel, LspAdapterDelegate}; +use language_model::{LanguageModelRegistry, LanguageModelTool}; +use schemars::JsonSchema; +use semantic_index::SemanticDb; +use serde::Deserialize; + +pub struct ProjectSlashCommandFeatureFlag; + +impl FeatureFlag for ProjectSlashCommandFeatureFlag { + const NAME: &'static str = "project-slash-command"; +} + use std::{ - fmt::Write, - path::Path, + fmt::Write as _, + ops::DerefMut, sync::{atomic::AtomicBool, Arc}, }; -use ui::prelude::*; +use ui::{BorrowAppContext as _, IconName}; use workspace::Workspace; -pub(crate) struct ProjectSlashCommand; +pub struct ProjectSlashCommand { + prompt_builder: Arc, +} impl ProjectSlashCommand { - async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { - let buffer = fs.load(path_to_cargo_toml).await?; - let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; - - let mut message = String::new(); - writeln!(message, "You are in a Rust project.")?; - - if let Some(workspace) = cargo_toml.workspace { - writeln!( - message, - "The project is a Cargo workspace with the following members:" - )?; - for member in workspace.members { - writeln!(message, "- {member}")?; - } - - if !workspace.default_members.is_empty() { - writeln!(message, "The default members are:")?; - for member in workspace.default_members { - writeln!(message, "- {member}")?; - } - } - - if !workspace.dependencies.is_empty() { - writeln!( - message, - "The following workspace dependencies are installed:" - )?; - for dependency in workspace.dependencies.keys() { - writeln!(message, "- {dependency}")?; - } - } - } else if let Some(package) = cargo_toml.package { - writeln!( - message, - "The project name is \"{name}\".", - name = package.name - )?; - - let description = package - .description - .as_ref() - .and_then(|description| description.get().ok().cloned()); - if let 
Some(description) = description.as_ref() { - writeln!(message, "It describes itself as \"{description}\".")?; - } - - if !cargo_toml.dependencies.is_empty() { - writeln!(message, "The following dependencies are installed:")?; - for dependency in cargo_toml.dependencies.keys() { - writeln!(message, "- {dependency}")?; - } - } - } - - Ok(message) - } - - fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { - let worktree = project.read(cx).worktrees(cx).next()?; - let worktree = worktree.read(cx); - let entry = worktree.entry_for_path("Cargo.toml")?; - let path = ProjectPath { - worktree_id: worktree.id(), - path: entry.path.clone(), - }; - Some(Arc::from( - project.read(cx).absolute_path(&path, cx)?.as_path(), - )) + pub fn new(prompt_builder: Arc) -> Self { + Self { prompt_builder } } } @@ -93,12 +42,20 @@ impl SlashCommand for ProjectSlashCommand { "project".into() } + fn label(&self, cx: &AppContext) -> CodeLabel { + create_label_for_command("project", &[], cx) + } + fn description(&self) -> String { - "insert project metadata".into() + "Generate semantic searches based on the current context".into() } fn menu_text(&self) -> String { - "Insert Project Metadata".into() + "Project Context".into() + } + + fn requires_argument(&self) -> bool { + false } fn complete_argument( @@ -108,46 +65,126 @@ impl SlashCommand for ProjectSlashCommand { _workspace: Option>, _cx: &mut WindowContext, ) -> Task>> { - Task::ready(Err(anyhow!("this command does not require argument"))) - } - - fn requires_argument(&self) -> bool { - false + Task::ready(Ok(Vec::new())) } fn run( self: Arc, _arguments: &[String], - _context_slash_command_output_sections: &[SlashCommandOutputSection], - _context_buffer: BufferSnapshot, + _context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: language::BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, ) -> Task> { - let output = workspace.update(cx, |workspace, cx| { - let 
project = workspace.project().clone(); - let fs = workspace.project().read(cx).fs().clone(); - let path = Self::path_to_cargo_toml(project, cx); - let output = cx.background_executor().spawn(async move { - let path = path.with_context(|| "Cargo.toml not found")?; - Self::build_message(fs, &path).await - }); - - cx.foreground_executor().spawn(async move { - let text = output.await?; - let range = 0..text.len(); - Ok(SlashCommandOutput { - text, - sections: vec![SlashCommandOutputSection { - range, - icon: IconName::FileTree, - label: "Project".into(), + let model_registry = LanguageModelRegistry::read_global(cx); + let current_model = model_registry.active_model(); + let prompt_builder = self.prompt_builder.clone(); + + let Some(workspace) = workspace.upgrade() else { + return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); + }; + let project = workspace.read(cx).project().clone(); + let fs = project.read(cx).fs().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow::anyhow!("no project indexer"))); + }; + + cx.spawn(|mut cx| async move { + let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?; + + let prompt = + prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?; + + let search_queries = current_model + .use_tool::( + language_model::LanguageModelRequest { + messages: vec![language_model::LanguageModelRequestMessage { + role: language_model::Role::User, + content: vec![language_model::MessageContent::Text(prompt)], + cache: false, + }], + tools: vec![], + stop: vec![], + temperature: None, + }, + cx.deref_mut(), + ) + .await? + .search_queries; + + let results = project_index + .read_with(&cx, |project_index, cx| { + project_index.search(search_queries.clone(), 25, cx) + })? 
+ .await?; + + let results = SemanticDb::load_results(results, &fs, &cx).await?; + + cx.background_executor() + .spawn(async move { + let mut output = "Project context:\n".to_string(); + let mut sections = Vec::new(); + + for (ix, query) in search_queries.into_iter().enumerate() { + let start_ix = output.len(); + writeln!(&mut output, "Results for {query}:").unwrap(); + let mut has_results = false; + for result in &results { + if result.query_index == ix { + add_search_result_section(result, &mut output, &mut sections); + has_results = true; + } + } + if has_results { + sections.push(SlashCommandOutputSection { + range: start_ix..output.len(), + icon: IconName::MagnifyingGlass, + label: query.into(), + metadata: None, + }); + output.push('\n'); + } else { + output.truncate(start_ix); + } + } + + sections.push(SlashCommandOutputSection { + range: 0..output.len(), + icon: IconName::Book, + label: "Project context".into(), metadata: None, - }], - run_commands_in_text: false, + }); + + Ok(SlashCommandOutput { + text: output, + sections, + run_commands_in_text: true, + }) }) - }) - }); - output.unwrap_or_else(|error| Task::ready(Err(error))) + .await + }) + } +} + +#[derive(JsonSchema, Deserialize)] +struct SearchQueries { + /// An array of semantic search queries. + /// + /// These queries will be used to search the user's codebase. + /// The function can only accept 4 queries, otherwise it will error. + /// As such, it's important that you limit the length of the search_queries array to 5 queries or less. 
+ search_queries: Vec, +} + +impl LanguageModelTool for SearchQueries { + fn name() -> String { + "search_queries".to_string() + } + + fn description() -> String { + "Generate semantic search queries based on context".to_string() } } diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index 7e408cad396a45..f0f3ee3d25c660 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -7,7 +7,7 @@ use anyhow::Result; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use feature_flags::FeatureFlag; use gpui::{AppContext, Task, WeakView}; -use language::{CodeLabel, LineEnding, LspAdapterDelegate}; +use language::{CodeLabel, LspAdapterDelegate}; use semantic_index::{LoadedSearchResult, SemanticDb}; use std::{ fmt::Write, @@ -101,7 +101,7 @@ impl SlashCommand for SearchSlashCommand { cx.spawn(|cx| async move { let results = project_index .read_with(&cx, |project_index, cx| { - project_index.search(query.clone(), limit.unwrap_or(5), cx) + project_index.search(vec![query.clone()], limit.unwrap_or(5), cx) })? 
.await?; @@ -112,31 +112,8 @@ impl SlashCommand for SearchSlashCommand { .spawn(async move { let mut text = format!("Search results for {query}:\n"); let mut sections = Vec::new(); - for LoadedSearchResult { - path, - range, - full_path, - file_content, - row_range, - } in loaded_results - { - let section_start_ix = text.len(); - text.push_str(&codeblock_fence_for_path( - Some(&path), - Some(row_range.clone()), - )); - - let mut excerpt = file_content[range].to_string(); - LineEnding::normalize(&mut excerpt); - text.push_str(&excerpt); - writeln!(text, "\n```\n").unwrap(); - let section_end_ix = text.len() - 1; - sections.push(build_entry_output_section( - section_start_ix..section_end_ix, - Some(&full_path), - false, - Some(row_range.start() + 1..row_range.end() + 1), - )); + for loaded_result in &loaded_results { + add_search_result_section(loaded_result, &mut text, &mut sections); } let query = SharedString::from(query); @@ -159,3 +136,35 @@ impl SlashCommand for SearchSlashCommand { }) } } + +pub fn add_search_result_section( + loaded_result: &LoadedSearchResult, + text: &mut String, + sections: &mut Vec>, +) { + let LoadedSearchResult { + path, + full_path, + excerpt_content, + row_range, + .. 
+ } = loaded_result; + let section_start_ix = text.len(); + text.push_str(&codeblock_fence_for_path( + Some(&path), + Some(row_range.clone()), + )); + + text.push_str(&excerpt_content); + if !text.ends_with('\n') { + text.push('\n'); + } + writeln!(text, "```\n").unwrap(); + let section_end_ix = text.len() - 1; + sections.push(build_entry_output_section( + section_start_ix..section_end_ix, + Some(&full_path), + false, + Some(row_range.start() + 1..row_range.end() + 1), + )); +} diff --git a/crates/assistant/src/slash_command_settings.rs b/crates/assistant/src/slash_command_settings.rs index eda950b6a222cf..c524b37803edea 100644 --- a/crates/assistant/src/slash_command_settings.rs +++ b/crates/assistant/src/slash_command_settings.rs @@ -10,9 +10,9 @@ pub struct SlashCommandSettings { /// Settings for the `/docs` slash command. #[serde(default)] pub docs: DocsCommandSettings, - /// Settings for the `/project` slash command. + /// Settings for the `/cargo-workspace` slash command. #[serde(default)] - pub project: ProjectCommandSettings, + pub cargo_workspace: CargoWorkspaceCommandSettings, } /// Settings for the `/docs` slash command. @@ -23,10 +23,10 @@ pub struct DocsCommandSettings { pub enabled: bool, } -/// Settings for the `/project` slash command. +/// Settings for the `/cargo-workspace` slash command. #[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] -pub struct ProjectCommandSettings { - /// Whether `/project` is enabled. +pub struct CargoWorkspaceCommandSettings { + /// Whether `/cargo-workspace` is enabled. 
#[serde(default)] pub enabled: bool, } diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 0580053373c54c..e2c8b42644a31d 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -438,7 +438,7 @@ async fn run_eval_project( loop { match cx.update(|cx| { let project_index = project_index.read(cx); - project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx) + project_index.search(vec![query.query.clone()], SEARCH_RESULT_LIMIT, cx) }) { Ok(task) => match task.await { Ok(answer) => { diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index 0cc3f9f317b08f..c5c2c633a10603 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -98,7 +98,7 @@ fn main() { .update(|cx| { let project_index = project_index.read(cx); let query = "converting an anchor to a point"; - project_index.search(query.into(), 4, cx) + project_index.search(vec![query.into()], 4, cx) }) .unwrap() .await diff --git a/crates/semantic_index/src/embedding.rs b/crates/semantic_index/src/embedding.rs index b05c4ac9da0ef2..1e1e0f0be71816 100644 --- a/crates/semantic_index/src/embedding.rs +++ b/crates/semantic_index/src/embedding.rs @@ -42,14 +42,23 @@ impl Embedding { self.0.len() } - pub fn similarity(self, other: &Embedding) -> f32 { - debug_assert_eq!(self.0.len(), other.0.len()); - self.0 + pub fn similarity(&self, others: &[Embedding]) -> (f32, usize) { + debug_assert!(others.iter().all(|other| self.0.len() == other.0.len())); + others .iter() - .copied() - .zip(other.0.iter().copied()) - .map(|(a, b)| a * b) - .sum() + .enumerate() + .map(|(index, other)| { + let dot_product: f32 = self + .0 + .iter() + .copied() + .zip(other.0.iter().copied()) + .map(|(a, b)| a * b) + .sum(); + (dot_product, index) + }) + .max_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal)) + .unwrap_or((0.0, 0)) } } diff --git a/crates/semantic_index/src/project_index.rs 
b/crates/semantic_index/src/project_index.rs index 5c35c93fa943a7..21c036d60a9bcb 100644 --- a/crates/semantic_index/src/project_index.rs +++ b/crates/semantic_index/src/project_index.rs @@ -31,20 +31,23 @@ pub struct SearchResult { pub path: Arc, pub range: Range, pub score: f32, + pub query_index: usize, } +#[derive(Debug, PartialEq, Eq)] pub struct LoadedSearchResult { pub path: Arc, - pub range: Range, pub full_path: PathBuf, - pub file_content: String, + pub excerpt_content: String, pub row_range: RangeInclusive, + pub query_index: usize, } pub struct WorktreeSearchResult { pub worktree_id: WorktreeId, pub path: Arc, pub range: Range, + pub query_index: usize, pub score: f32, } @@ -227,7 +230,7 @@ impl ProjectIndex { pub fn search( &self, - query: String, + queries: Vec, limit: usize, cx: &AppContext, ) -> Task>> { @@ -275,15 +278,18 @@ impl ProjectIndex { cx.spawn(|cx| async move { #[cfg(debug_assertions)] let embedding_query_start = std::time::Instant::now(); - log::info!("Searching for {query}"); - - let query_embeddings = embedding_provider - .embed(&[TextToEmbed::new(&query)]) - .await?; - let query_embedding = query_embeddings - .into_iter() - .next() - .ok_or_else(|| anyhow!("no embedding for query"))?; + log::info!("Searching for {queries:?}"); + let queries: Vec = queries + .iter() + .map(|s| TextToEmbed::new(s.as_str())) + .collect(); + + let query_embeddings = embedding_provider.embed(&queries[..]).await?; + if query_embeddings.len() != queries.len() { + return Err(anyhow!( + "The number of query embeddings does not match the number of queries" + )); + } let mut results_by_worker = Vec::new(); for _ in 0..cx.background_executor().num_cpus() { @@ -292,28 +298,34 @@ impl ProjectIndex { #[cfg(debug_assertions)] let search_start = std::time::Instant::now(); - cx.background_executor() .scoped(|cx| { for results in results_by_worker.iter_mut() { cx.spawn(async { while let Ok((worktree_id, path, chunk)) = chunks_rx.recv().await { - let score = 
chunk.embedding.similarity(&query_embedding); + let (score, query_index) = + chunk.embedding.similarity(&query_embeddings); + let ix = match results.binary_search_by(|probe| { score.partial_cmp(&probe.score).unwrap_or(Ordering::Equal) }) { Ok(ix) | Err(ix) => ix, }; - results.insert( - ix, - WorktreeSearchResult { - worktree_id, - path: path.clone(), - range: chunk.chunk.range.clone(), - score, - }, - ); - results.truncate(limit); + if ix < limit { + results.insert( + ix, + WorktreeSearchResult { + worktree_id, + path: path.clone(), + range: chunk.chunk.range.clone(), + query_index, + score, + }, + ); + if results.len() > limit { + results.pop(); + } + } } }); } @@ -333,6 +345,7 @@ impl ProjectIndex { path: result.path, range: result.range, score: result.score, + query_index: result.query_index, }) })); } diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index 6c97ece024c7fe..332b4271a003cf 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -12,8 +12,13 @@ use anyhow::{Context as _, Result}; use collections::HashMap; use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Context, Global, Model, WeakModel}; -use project::Project; -use std::{path::PathBuf, sync::Arc}; +use language::LineEnding; +use project::{Project, Worktree}; +use std::{ + cmp::Ordering, + path::{Path, PathBuf}, + sync::Arc, +}; use ui::ViewContext; use util::ResultExt as _; use workspace::Workspace; @@ -77,46 +82,127 @@ impl SemanticDb { } pub async fn load_results( - results: Vec, + mut results: Vec, fs: &Arc, cx: &AsyncAppContext, ) -> Result> { - let mut loaded_results = Vec::new(); + let mut max_scores_by_path = HashMap::<_, (f32, usize)>::default(); + for result in &results { + let (score, query_index) = max_scores_by_path + .entry((result.worktree.clone(), result.path.clone())) + .or_default(); + if result.score > *score { + *score = result.score; + *query_index = 
result.query_index; + } + } + + results.sort_by(|a, b| { + let max_score_a = max_scores_by_path[&(a.worktree.clone(), a.path.clone())].0; + let max_score_b = max_scores_by_path[&(b.worktree.clone(), b.path.clone())].0; + max_score_b + .partial_cmp(&max_score_a) + .unwrap_or(Ordering::Equal) + .then_with(|| a.worktree.entity_id().cmp(&b.worktree.entity_id())) + .then_with(|| a.path.cmp(&b.path)) + .then_with(|| a.range.start.cmp(&b.range.start)) + }); + + let mut last_loaded_file: Option<(Model, Arc, PathBuf, String)> = None; + let mut loaded_results = Vec::::new(); for result in results { - let (full_path, file_content) = result.worktree.read_with(cx, |worktree, _cx| { - let entry_abs_path = worktree.abs_path().join(&result.path); - let mut entry_full_path = PathBuf::from(worktree.root_name()); - entry_full_path.push(&result.path); - let file_content = async { - let entry_abs_path = entry_abs_path; - fs.load(&entry_abs_path).await + let full_path; + let file_content; + if let Some(last_loaded_file) = + last_loaded_file + .as_ref() + .filter(|(last_worktree, last_path, _, _)| { + last_worktree == &result.worktree && last_path == &result.path + }) + { + full_path = last_loaded_file.2.clone(); + file_content = &last_loaded_file.3; + } else { + let output = result.worktree.read_with(cx, |worktree, _cx| { + let entry_abs_path = worktree.abs_path().join(&result.path); + let mut entry_full_path = PathBuf::from(worktree.root_name()); + entry_full_path.push(&result.path); + let file_content = async { + let entry_abs_path = entry_abs_path; + fs.load(&entry_abs_path).await + }; + (entry_full_path, file_content) + })?; + full_path = output.0; + let Some(content) = output.1.await.log_err() else { + continue; }; - (entry_full_path, file_content) - })?; - if let Some(file_content) = file_content.await.log_err() { - let range_start = result.range.start.min(file_content.len()); - let range_end = result.range.end.min(file_content.len()); - - let start_row = 
file_content[0..range_start].matches('\n').count() as u32; - let end_row = file_content[0..range_end].matches('\n').count() as u32; - let start_line_byte_offset = file_content[0..range_start] - .rfind('\n') - .map(|pos| pos + 1) - .unwrap_or_default(); - let end_line_byte_offset = file_content[range_end..] + last_loaded_file = Some(( + result.worktree.clone(), + result.path.clone(), + full_path.clone(), + content, + )); + file_content = &last_loaded_file.as_ref().unwrap().3; + }; + + let query_index = max_scores_by_path[&(result.worktree.clone(), result.path.clone())].1; + + let mut range_start = result.range.start.min(file_content.len()); + let mut range_end = result.range.end.min(file_content.len()); + while !file_content.is_char_boundary(range_start) { + range_start += 1; + } + while !file_content.is_char_boundary(range_end) { + range_end += 1; + } + + let start_row = file_content[0..range_start].matches('\n').count() as u32; + let mut end_row = file_content[0..range_end].matches('\n').count() as u32; + let start_line_byte_offset = file_content[0..range_start] + .rfind('\n') + .map(|pos| pos + 1) + .unwrap_or_default(); + let mut end_line_byte_offset = range_end; + if file_content[..end_line_byte_offset].ends_with('\n') { + end_row -= 1; + } else { + end_line_byte_offset = file_content[range_end..] 
.find('\n') - .map(|pos| range_end + pos) + .map(|pos| range_end + pos + 1) .unwrap_or_else(|| file_content.len()); + } + let mut excerpt_content = + file_content[start_line_byte_offset..end_line_byte_offset].to_string(); + LineEnding::normalize(&mut excerpt_content); + + if let Some(prev_result) = loaded_results.last_mut() { + if prev_result.full_path == full_path { + if *prev_result.row_range.end() + 1 == start_row { + prev_result.row_range = *prev_result.row_range.start()..=end_row; + prev_result.excerpt_content.push_str(&excerpt_content); + continue; + } + } + } - loaded_results.push(LoadedSearchResult { - path: result.path, - range: start_line_byte_offset..end_line_byte_offset, - full_path, - file_content, - row_range: start_row..=end_row, - }); + loaded_results.push(LoadedSearchResult { + path: result.path, + full_path, + excerpt_content, + row_range: start_row..=end_row, + query_index, + }); + } + + for result in &mut loaded_results { + while result.excerpt_content.ends_with("\n\n") { + result.excerpt_content.pop(); + result.row_range = + *result.row_range.start()..=result.row_range.end().saturating_sub(1) } } + Ok(loaded_results) } @@ -312,7 +398,7 @@ mod tests { .update(|cx| { let project_index = project_index.read(cx); let query = "garbage in, garbage out"; - project_index.search(query.into(), 4, cx) + project_index.search(vec![query.into()], 4, cx) }) .await .unwrap(); @@ -426,4 +512,117 @@ mod tests { ], ); } + + #[gpui::test] + async fn test_load_search_results(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project_path = Path::new("/fake_project"); + + let file1_content = "one\ntwo\nthree\nfour\nfive\n"; + let file2_content = "aaa\nbbb\nccc\nddd\neee\n"; + + fs.insert_tree( + project_path, + json!({ + "file1.txt": file1_content, + "file2.txt": file2_content, + }), + ) + .await; + + let fs = fs as Arc; + let project = Project::test(fs.clone(), [project_path], cx).await; + let worktree = project.read_with(cx, 
|project, cx| project.worktrees(cx).next().unwrap()); + + // chunk that is already newline-aligned + let search_results = vec![SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: 0..file1_content.find("four").unwrap(), + score: 0.5, + query_index: 0, + }]; + assert_eq!( + SemanticDb::load_results(search_results, &fs, &cx.to_async()) + .await + .unwrap(), + &[LoadedSearchResult { + path: Path::new("file1.txt").into(), + full_path: "fake_project/file1.txt".into(), + excerpt_content: "one\ntwo\nthree\n".into(), + row_range: 0..=2, + query_index: 0, + }] + ); + + // chunk that is *not* newline-aligned + let search_results = vec![SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: file1_content.find("two").unwrap() + 1..file1_content.find("four").unwrap() + 2, + score: 0.5, + query_index: 0, + }]; + assert_eq!( + SemanticDb::load_results(search_results, &fs, &cx.to_async()) + .await + .unwrap(), + &[LoadedSearchResult { + path: Path::new("file1.txt").into(), + full_path: "fake_project/file1.txt".into(), + excerpt_content: "two\nthree\nfour\n".into(), + row_range: 1..=3, + query_index: 0, + }] + ); + + // chunks that are adjacent + + let search_results = vec![ + SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: file1_content.find("two").unwrap()..file1_content.len(), + score: 0.6, + query_index: 0, + }, + SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: 0..file1_content.find("two").unwrap(), + score: 0.5, + query_index: 1, + }, + SearchResult { + worktree: worktree.clone(), + path: Path::new("file2.txt").into(), + range: 0..file2_content.len(), + score: 0.8, + query_index: 1, + }, + ]; + assert_eq!( + SemanticDb::load_results(search_results, &fs, &cx.to_async()) + .await + .unwrap(), + &[ + LoadedSearchResult { + path: Path::new("file2.txt").into(), + full_path: "fake_project/file2.txt".into(), + 
excerpt_content: file2_content.into(), + row_range: 0..=4, + query_index: 1, + }, + LoadedSearchResult { + path: Path::new("file1.txt").into(), + full_path: "fake_project/file1.txt".into(), + excerpt_content: file1_content.into(), + row_range: 0..=4, + query_index: 0, + } + ] + ); + } } From 743feb98bcae8e00c8399be03fb27dc2b925bcdb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 20 Sep 2024 15:28:50 -0700 Subject: [PATCH 257/762] Add the ability to propose changes to a set of buffers (#18170) This PR introduces functionality for creating *branches* of buffers that can be used to preview and edit change sets that haven't yet been applied to the buffers themselves. Release Notes: - N/A --------- Co-authored-by: Marshall Bowers Co-authored-by: Marshall --- Cargo.lock | 1 - crates/assistant/src/context.rs | 9 +- crates/channel/src/channel_buffer.rs | 5 +- crates/clock/src/clock.rs | 83 ++++++---- crates/editor/src/actions.rs | 1 + crates/editor/src/editor.rs | 78 ++++++++-- crates/editor/src/element.rs | 5 +- crates/editor/src/git.rs | 24 +-- crates/editor/src/hunk_diff.rs | 24 +-- crates/editor/src/proposed_changes_editor.rs | 125 +++++++++++++++ crates/editor/src/test.rs | 6 +- crates/git/src/diff.rs | 70 ++++----- crates/language/src/buffer.rs | 154 ++++++++++++++----- crates/language/src/buffer_tests.rs | 146 ++++++++++++++++-- crates/multi_buffer/Cargo.toml | 1 - crates/multi_buffer/src/multi_buffer.rs | 46 +++--- crates/project/src/project.rs | 7 +- crates/project/src/project_tests.rs | 2 +- crates/remote_server/src/headless_project.rs | 7 +- crates/text/src/text.rs | 14 ++ 20 files changed, 622 insertions(+), 186 deletions(-) create mode 100644 crates/editor/src/proposed_changes_editor.rs diff --git a/Cargo.lock b/Cargo.lock index dd07dfa1cf0843..c0f6751b895e28 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7055,7 +7055,6 @@ dependencies = [ "ctor", "env_logger", "futures 0.3.30", - "git", "gpui", "itertools 0.13.0", "language", diff --git 
a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 1cac47831f52dc..4f1f885b33fe67 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -1006,9 +1006,12 @@ impl Context { cx: &mut ModelContext, ) { match event { - language::BufferEvent::Operation(operation) => cx.emit(ContextEvent::Operation( - ContextOperation::BufferOperation(operation.clone()), - )), + language::BufferEvent::Operation { + operation, + is_local: true, + } => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation( + operation.clone(), + ))), language::BufferEvent::Edited => { self.count_remaining_tokens(cx); self.reparse(cx); diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 755e7400e1b66e..0a4a259648bb74 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -175,7 +175,10 @@ impl ChannelBuffer { cx: &mut ModelContext, ) { match event { - language::BufferEvent::Operation(operation) => { + language::BufferEvent::Operation { + operation, + is_local: true, + } => { if *ZED_ALWAYS_ACTIVE { if let language::Operation::UpdateSelections { selections, .. } = operation { if selections.is_empty() { diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index f7d36ed4a87b9d..2b45e4a8fad850 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -9,6 +9,8 @@ use std::{ pub use system_clock::*; +pub const LOCAL_BRANCH_REPLICA_ID: u16 = u16::MAX; + /// A unique identifier for each distributed node. pub type ReplicaId = u16; @@ -25,7 +27,10 @@ pub struct Lamport { /// A [vector clock](https://en.wikipedia.org/wiki/Vector_clock). 
#[derive(Clone, Default, Hash, Eq, PartialEq)] -pub struct Global(SmallVec<[u32; 8]>); +pub struct Global { + values: SmallVec<[u32; 8]>, + local_branch_value: u32, +} impl Global { pub fn new() -> Self { @@ -33,41 +38,51 @@ impl Global { } pub fn get(&self, replica_id: ReplicaId) -> Seq { - self.0.get(replica_id as usize).copied().unwrap_or(0) as Seq + if replica_id == LOCAL_BRANCH_REPLICA_ID { + self.local_branch_value + } else { + self.values.get(replica_id as usize).copied().unwrap_or(0) as Seq + } } pub fn observe(&mut self, timestamp: Lamport) { if timestamp.value > 0 { - let new_len = timestamp.replica_id as usize + 1; - if new_len > self.0.len() { - self.0.resize(new_len, 0); + if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID { + self.local_branch_value = cmp::max(self.local_branch_value, timestamp.value); + } else { + let new_len = timestamp.replica_id as usize + 1; + if new_len > self.values.len() { + self.values.resize(new_len, 0); + } + + let entry = &mut self.values[timestamp.replica_id as usize]; + *entry = cmp::max(*entry, timestamp.value); } - - let entry = &mut self.0[timestamp.replica_id as usize]; - *entry = cmp::max(*entry, timestamp.value); } } pub fn join(&mut self, other: &Self) { - if other.0.len() > self.0.len() { - self.0.resize(other.0.len(), 0); + if other.values.len() > self.values.len() { + self.values.resize(other.values.len(), 0); } - for (left, right) in self.0.iter_mut().zip(&other.0) { + for (left, right) in self.values.iter_mut().zip(&other.values) { *left = cmp::max(*left, *right); } + + self.local_branch_value = cmp::max(self.local_branch_value, other.local_branch_value); } pub fn meet(&mut self, other: &Self) { - if other.0.len() > self.0.len() { - self.0.resize(other.0.len(), 0); + if other.values.len() > self.values.len() { + self.values.resize(other.values.len(), 0); } let mut new_len = 0; for (ix, (left, right)) in self - .0 + .values .iter_mut() - .zip(other.0.iter().chain(iter::repeat(&0))) + 
.zip(other.values.iter().chain(iter::repeat(&0))) .enumerate() { if *left == 0 { @@ -80,7 +95,8 @@ impl Global { new_len = ix + 1; } } - self.0.resize(new_len, 0); + self.values.resize(new_len, 0); + self.local_branch_value = cmp::min(self.local_branch_value, other.local_branch_value); } pub fn observed(&self, timestamp: Lamport) -> bool { @@ -88,34 +104,44 @@ impl Global { } pub fn observed_any(&self, other: &Self) -> bool { - self.0 + self.values .iter() - .zip(other.0.iter()) + .zip(other.values.iter()) .any(|(left, right)| *right > 0 && left >= right) + || (other.local_branch_value > 0 && self.local_branch_value >= other.local_branch_value) } pub fn observed_all(&self, other: &Self) -> bool { - let mut rhs = other.0.iter(); - self.0.iter().all(|left| match rhs.next() { + let mut rhs = other.values.iter(); + self.values.iter().all(|left| match rhs.next() { Some(right) => left >= right, None => true, }) && rhs.next().is_none() + && self.local_branch_value >= other.local_branch_value } pub fn changed_since(&self, other: &Self) -> bool { - self.0.len() > other.0.len() + self.values.len() > other.values.len() || self - .0 + .values .iter() - .zip(other.0.iter()) + .zip(other.values.iter()) .any(|(left, right)| left > right) + || self.local_branch_value > other.local_branch_value } pub fn iter(&self) -> impl Iterator + '_ { - self.0.iter().enumerate().map(|(replica_id, seq)| Lamport { - replica_id: replica_id as ReplicaId, - value: *seq, - }) + self.values + .iter() + .enumerate() + .map(|(replica_id, seq)| Lamport { + replica_id: replica_id as ReplicaId, + value: *seq, + }) + .chain((self.local_branch_value > 0).then_some(Lamport { + replica_id: LOCAL_BRANCH_REPLICA_ID, + value: self.local_branch_value, + })) } } @@ -192,6 +218,9 @@ impl fmt::Debug for Global { } write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?; } + if self.local_branch_value > 0 { + write!(f, ": {}", self.local_branch_value)?; + } write!(f, "}}") } } diff --git 
a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 93c83af1950ec5..2383c7f71af8a9 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -273,6 +273,7 @@ gpui::actions!( NextScreen, OpenExcerpts, OpenExcerptsSplit, + OpenProposedChangesEditor, OpenFile, OpenPermalinkToLine, OpenUrl, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 1f4a9376d22fa5..b1a3d95a0da780 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -35,6 +35,7 @@ mod lsp_ext; mod mouse_context_menu; pub mod movement; mod persistence; +mod proposed_changes_editor; mod rust_analyzer_ext; pub mod scroll; mod selections_collection; @@ -46,7 +47,7 @@ mod signature_help; #[cfg(any(test, feature = "test-support"))] pub mod test; -use ::git::diff::{DiffHunk, DiffHunkStatus}; +use ::git::diff::DiffHunkStatus; use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry}; pub(crate) use actions::*; use aho_corasick::AhoCorasick; @@ -98,6 +99,7 @@ use language::{ }; use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange}; use linked_editing_ranges::refresh_linked_ranges; +use proposed_changes_editor::{ProposedChangesBuffer, ProposedChangesEditor}; use similar::{ChangeTag, TextDiff}; use task::{ResolvedTask, TaskTemplate, TaskVariables}; @@ -113,7 +115,9 @@ pub use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, }; -use multi_buffer::{ExpandExcerptDirection, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16}; +use multi_buffer::{ + ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16, +}; use ordered_float::OrderedFloat; use parking_lot::{Mutex, RwLock}; use project::project_settings::{GitGutterSetting, ProjectSettings}; @@ -6152,7 +6156,7 @@ impl Editor { pub fn prepare_revert_change( revert_changes: &mut HashMap, Rope)>>, multi_buffer: &Model, - hunk: &DiffHunk, + 
hunk: &MultiBufferDiffHunk, cx: &AppContext, ) -> Option<()> { let buffer = multi_buffer.read(cx).buffer(hunk.buffer_id)?; @@ -9338,7 +9342,7 @@ impl Editor { snapshot: &DisplaySnapshot, initial_point: Point, is_wrapped: bool, - hunks: impl Iterator>, + hunks: impl Iterator, cx: &mut ViewContext, ) -> bool { let display_point = initial_point.to_display_point(snapshot); @@ -11885,6 +11889,52 @@ impl Editor { self.searchable } + fn open_proposed_changes_editor( + &mut self, + _: &OpenProposedChangesEditor, + cx: &mut ViewContext, + ) { + let Some(workspace) = self.workspace() else { + cx.propagate(); + return; + }; + + let buffer = self.buffer.read(cx); + let mut new_selections_by_buffer = HashMap::default(); + for selection in self.selections.all::(cx) { + for (buffer, mut range, _) in + buffer.range_to_buffer_ranges(selection.start..selection.end, cx) + { + if selection.reversed { + mem::swap(&mut range.start, &mut range.end); + } + let mut range = range.to_point(buffer.read(cx)); + range.start.column = 0; + range.end.column = buffer.read(cx).line_len(range.end.row); + new_selections_by_buffer + .entry(buffer) + .or_insert(Vec::new()) + .push(range) + } + } + + let proposed_changes_buffers = new_selections_by_buffer + .into_iter() + .map(|(buffer, ranges)| ProposedChangesBuffer { buffer, ranges }) + .collect::>(); + let proposed_changes_editor = cx.new_view(|cx| { + ProposedChangesEditor::new(proposed_changes_buffers, self.project.clone(), cx) + }); + + cx.window_context().defer(move |cx| { + workspace.update(cx, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item(Box::new(proposed_changes_editor), true, true, None, cx); + }); + }); + }); + } + fn open_excerpts_in_split(&mut self, _: &OpenExcerptsSplit, cx: &mut ViewContext) { self.open_excerpts_common(true, cx) } @@ -12399,7 +12449,7 @@ impl Editor { fn hunks_for_selections( multi_buffer_snapshot: &MultiBufferSnapshot, selections: &[Selection], -) -> Vec> { +) -> Vec { let 
buffer_rows_for_selections = selections.iter().map(|selection| { let head = selection.head(); let tail = selection.tail(); @@ -12418,7 +12468,7 @@ fn hunks_for_selections( pub fn hunks_for_rows( rows: impl Iterator>, multi_buffer_snapshot: &MultiBufferSnapshot, -) -> Vec> { +) -> Vec { let mut hunks = Vec::new(); let mut processed_buffer_rows: HashMap>> = HashMap::default(); @@ -12430,14 +12480,14 @@ pub fn hunks_for_rows( // when the caret is just above or just below the deleted hunk. let allow_adjacent = hunk_status(&hunk) == DiffHunkStatus::Removed; let related_to_selection = if allow_adjacent { - hunk.associated_range.overlaps(&query_rows) - || hunk.associated_range.start == query_rows.end - || hunk.associated_range.end == query_rows.start + hunk.row_range.overlaps(&query_rows) + || hunk.row_range.start == query_rows.end + || hunk.row_range.end == query_rows.start } else { // `selected_multi_buffer_rows` are inclusive (e.g. [2..2] means 2nd row is selected) - // `hunk.associated_range` is exclusive (e.g. [2..3] means 2nd row is selected) - hunk.associated_range.overlaps(&selected_multi_buffer_rows) - || selected_multi_buffer_rows.end == hunk.associated_range.start + // `hunk.row_range` is exclusive (e.g. 
[2..3] means 2nd row is selected) + hunk.row_range.overlaps(&selected_multi_buffer_rows) + || selected_multi_buffer_rows.end == hunk.row_range.start }; if related_to_selection { if !processed_buffer_rows @@ -13738,10 +13788,10 @@ impl RowRangeExt for Range { } } -fn hunk_status(hunk: &DiffHunk) -> DiffHunkStatus { +fn hunk_status(hunk: &MultiBufferDiffHunk) -> DiffHunkStatus { if hunk.diff_base_byte_range.is_empty() { DiffHunkStatus::Added - } else if hunk.associated_range.is_empty() { + } else if hunk.row_range.is_empty() { DiffHunkStatus::Removed } else { DiffHunkStatus::Modified diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 47107b97546871..d4075431ff602b 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -346,6 +346,7 @@ impl EditorElement { register_action(view, cx, Editor::toggle_code_actions); register_action(view, cx, Editor::open_excerpts); register_action(view, cx, Editor::open_excerpts_in_split); + register_action(view, cx, Editor::open_proposed_changes_editor); register_action(view, cx, Editor::toggle_soft_wrap); register_action(view, cx, Editor::toggle_tab_bar); register_action(view, cx, Editor::toggle_line_numbers); @@ -3710,11 +3711,11 @@ impl EditorElement { ) .map(|hunk| { let start_display_row = - MultiBufferPoint::new(hunk.associated_range.start.0, 0) + MultiBufferPoint::new(hunk.row_range.start.0, 0) .to_display_point(&snapshot.display_snapshot) .row(); let mut end_display_row = - MultiBufferPoint::new(hunk.associated_range.end.0, 0) + MultiBufferPoint::new(hunk.row_range.end.0, 0) .to_display_point(&snapshot.display_snapshot) .row(); if end_display_row != start_display_row { diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index 63b083faa89bd5..79b78d5d148488 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -2,9 +2,9 @@ pub mod blame; use std::ops::Range; -use git::diff::{DiffHunk, DiffHunkStatus}; +use git::diff::DiffHunkStatus; use 
language::Point; -use multi_buffer::{Anchor, MultiBufferRow}; +use multi_buffer::{Anchor, MultiBufferDiffHunk}; use crate::{ display_map::{DisplaySnapshot, ToDisplayPoint}, @@ -49,25 +49,25 @@ impl DisplayDiffHunk { } pub fn diff_hunk_to_display( - hunk: &DiffHunk, + hunk: &MultiBufferDiffHunk, snapshot: &DisplaySnapshot, ) -> DisplayDiffHunk { - let hunk_start_point = Point::new(hunk.associated_range.start.0, 0); - let hunk_start_point_sub = Point::new(hunk.associated_range.start.0.saturating_sub(1), 0); + let hunk_start_point = Point::new(hunk.row_range.start.0, 0); + let hunk_start_point_sub = Point::new(hunk.row_range.start.0.saturating_sub(1), 0); let hunk_end_point_sub = Point::new( - hunk.associated_range + hunk.row_range .end .0 .saturating_sub(1) - .max(hunk.associated_range.start.0), + .max(hunk.row_range.start.0), 0, ); let status = hunk_status(hunk); let is_removal = status == DiffHunkStatus::Removed; - let folds_start = Point::new(hunk.associated_range.start.0.saturating_sub(2), 0); - let folds_end = Point::new(hunk.associated_range.end.0 + 2, 0); + let folds_start = Point::new(hunk.row_range.start.0.saturating_sub(2), 0); + let folds_end = Point::new(hunk.row_range.end.0 + 2, 0); let folds_range = folds_start..folds_end; let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| { @@ -87,7 +87,7 @@ pub fn diff_hunk_to_display( } else { let start = hunk_start_point.to_display_point(snapshot).row(); - let hunk_end_row = hunk.associated_range.end.max(hunk.associated_range.start); + let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start); let hunk_end_point = Point::new(hunk_end_row.0, 0); let multi_buffer_start = snapshot.buffer_snapshot.anchor_after(hunk_start_point); @@ -288,7 +288,7 @@ mod tests { assert_eq!( snapshot .git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.associated_range)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) .collect::>(), &expected, ); @@ -296,7 
+296,7 @@ mod tests { assert_eq!( snapshot .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.associated_range)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) .collect::>(), expected .iter() diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 361ea6246e308b..917d07ec4ee85b 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -4,11 +4,12 @@ use std::{ }; use collections::{hash_map, HashMap, HashSet}; -use git::diff::{DiffHunk, DiffHunkStatus}; +use git::diff::DiffHunkStatus; use gpui::{Action, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task, View}; use language::Buffer; use multi_buffer::{ - Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferRow, MultiBufferSnapshot, ToPoint, + Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, + MultiBufferSnapshot, ToPoint, }; use settings::SettingsStore; use text::{BufferId, Point}; @@ -190,9 +191,9 @@ impl Editor { .buffer_snapshot .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) .filter(|hunk| { - let hunk_display_row_range = Point::new(hunk.associated_range.start.0, 0) + let hunk_display_row_range = Point::new(hunk.row_range.start.0, 0) .to_display_point(&snapshot.display_snapshot) - ..Point::new(hunk.associated_range.end.0, 0) + ..Point::new(hunk.row_range.end.0, 0) .to_display_point(&snapshot.display_snapshot); let row_range_end = display_rows_with_expanded_hunks.get(&hunk_display_row_range.start.row()); @@ -203,7 +204,7 @@ impl Editor { fn toggle_hunks_expanded( &mut self, - hunks_to_toggle: Vec>, + hunks_to_toggle: Vec, cx: &mut ViewContext, ) { let previous_toggle_task = self.expanded_hunks.hunk_update_tasks.remove(&None); @@ -274,8 +275,8 @@ impl Editor { }); for remaining_hunk in hunks_to_toggle { let remaining_hunk_point_range = - Point::new(remaining_hunk.associated_range.start.0, 0) - 
..Point::new(remaining_hunk.associated_range.end.0, 0); + Point::new(remaining_hunk.row_range.start.0, 0) + ..Point::new(remaining_hunk.row_range.end.0, 0); hunks_to_expand.push(HoveredHunk { status: hunk_status(&remaining_hunk), multi_buffer_range: remaining_hunk_point_range @@ -705,7 +706,7 @@ impl Editor { fn to_diff_hunk( hovered_hunk: &HoveredHunk, multi_buffer_snapshot: &MultiBufferSnapshot, -) -> Option> { +) -> Option { let buffer_id = hovered_hunk .multi_buffer_range .start @@ -716,9 +717,8 @@ fn to_diff_hunk( let point_range = hovered_hunk .multi_buffer_range .to_point(multi_buffer_snapshot); - Some(DiffHunk { - associated_range: MultiBufferRow(point_range.start.row) - ..MultiBufferRow(point_range.end.row), + Some(MultiBufferDiffHunk { + row_range: MultiBufferRow(point_range.start.row)..MultiBufferRow(point_range.end.row), buffer_id, buffer_range, diff_base_byte_range: hovered_hunk.diff_base_byte_range.clone(), @@ -868,7 +868,7 @@ fn editor_with_deleted_text( fn buffer_diff_hunk( buffer_snapshot: &MultiBufferSnapshot, row_range: Range, -) -> Option> { +) -> Option { let mut hunks = buffer_snapshot.git_diff_hunks_in_range( MultiBufferRow(row_range.start.row)..MultiBufferRow(row_range.end.row), ); diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs new file mode 100644 index 00000000000000..3979e558a42364 --- /dev/null +++ b/crates/editor/src/proposed_changes_editor.rs @@ -0,0 +1,125 @@ +use crate::{Editor, EditorEvent}; +use collections::HashSet; +use futures::{channel::mpsc, future::join_all}; +use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View}; +use language::{Buffer, BufferEvent, Capability}; +use multi_buffer::{ExcerptRange, MultiBuffer}; +use project::Project; +use smol::stream::StreamExt; +use std::{ops::Range, time::Duration}; +use text::ToOffset; +use ui::prelude::*; +use workspace::Item; + +pub struct ProposedChangesEditor { + editor: View, + 
_subscriptions: Vec, + _recalculate_diffs_task: Task>, + recalculate_diffs_tx: mpsc::UnboundedSender>, +} + +pub struct ProposedChangesBuffer { + pub buffer: Model, + pub ranges: Vec>, +} + +impl ProposedChangesEditor { + pub fn new( + buffers: Vec>, + project: Option>, + cx: &mut ViewContext, + ) -> Self { + let mut subscriptions = Vec::new(); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + + for buffer in buffers { + let branch_buffer = buffer.buffer.update(cx, |buffer, cx| buffer.branch(cx)); + subscriptions.push(cx.subscribe(&branch_buffer, Self::on_buffer_event)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts( + branch_buffer, + buffer.ranges.into_iter().map(|range| ExcerptRange { + context: range, + primary: None, + }), + cx, + ); + }); + } + + let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded(); + + Self { + editor: cx + .new_view(|cx| Editor::for_multibuffer(multibuffer.clone(), project, true, cx)), + recalculate_diffs_tx, + _recalculate_diffs_task: cx.spawn(|_, mut cx| async move { + let mut buffers_to_diff = HashSet::default(); + while let Some(buffer) = recalculate_diffs_rx.next().await { + buffers_to_diff.insert(buffer); + + loop { + cx.background_executor() + .timer(Duration::from_millis(250)) + .await; + let mut had_further_changes = false; + while let Ok(next_buffer) = recalculate_diffs_rx.try_next() { + buffers_to_diff.insert(next_buffer?); + had_further_changes = true; + } + if !had_further_changes { + break; + } + } + + join_all(buffers_to_diff.drain().filter_map(|buffer| { + buffer + .update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx)) + .ok()? 
+ })) + .await; + } + None + }), + _subscriptions: subscriptions, + } + } + + fn on_buffer_event( + &mut self, + buffer: Model, + event: &BufferEvent, + _cx: &mut ViewContext, + ) { + if let BufferEvent::Edited = event { + self.recalculate_diffs_tx.unbounded_send(buffer).ok(); + } + } +} + +impl Render for ProposedChangesEditor { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + self.editor.clone() + } +} + +impl FocusableView for ProposedChangesEditor { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + self.editor.focus_handle(cx) + } +} + +impl EventEmitter for ProposedChangesEditor {} + +impl Item for ProposedChangesEditor { + type Event = EditorEvent; + + fn tab_icon(&self, _cx: &ui::WindowContext) -> Option { + Some(Icon::new(IconName::Pencil)) + } + + fn tab_content_text(&self, _cx: &WindowContext) -> Option { + Some("Proposed changes".into()) + } +} diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index fcbd3bd42314d3..50214cd723ee31 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -108,16 +108,16 @@ pub fn editor_hunks( .buffer_snapshot .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) .map(|hunk| { - let display_range = Point::new(hunk.associated_range.start.0, 0) + let display_range = Point::new(hunk.row_range.start.0, 0) .to_display_point(snapshot) .row() - ..Point::new(hunk.associated_range.end.0, 0) + ..Point::new(hunk.row_range.end.0, 0) .to_display_point(snapshot) .row(); let (_, buffer, _) = editor .buffer() .read(cx) - .excerpt_containing(Point::new(hunk.associated_range.start.0, 0), cx) + .excerpt_containing(Point::new(hunk.row_range.start.0, 0), cx) .expect("no excerpt for expanded buffer's hunk start"); let diff_base = buffer .read(cx) diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 8cc7ee186350c5..1f7930ce1442da 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -1,7 +1,7 @@ use rope::Rope; use std::{iter, 
ops::Range}; use sum_tree::SumTree; -use text::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, Point}; +use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point}; pub use git2 as libgit; use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; @@ -13,29 +13,30 @@ pub enum DiffHunkStatus { Removed, } -/// A diff hunk, representing a range of consequent lines in a singleton buffer, associated with a generic range. +/// A diff hunk resolved to rows in the buffer. #[derive(Debug, Clone, PartialEq, Eq)] -pub struct DiffHunk { - /// E.g. a range in multibuffer, that has an excerpt added, singleton buffer for which has this diff hunk. - /// Consider a singleton buffer with 10 lines, all of them are modified — so a corresponding diff hunk would have a range 0..10. - /// And a multibuffer with the excerpt of lines 2-6 from the singleton buffer. - /// If the multibuffer is searched for diff hunks, the associated range would be multibuffer rows, corresponding to rows 2..6 from the singleton buffer. - /// But the hunk range would be 0..10, same for any other excerpts from the same singleton buffer. - pub associated_range: Range, - /// Singleton buffer ID this hunk belongs to. - pub buffer_id: BufferId, - /// A consequent range of lines in the singleton buffer, that were changed and produced this diff hunk. +pub struct DiffHunk { + /// The buffer range, expressed in terms of rows. + pub row_range: Range, + /// The range in the buffer to which this hunk corresponds. pub buffer_range: Range, - /// Original singleton buffer text before the change, that was instead of the `buffer_range`. + /// The range in the buffer's diff base text to which this hunk corresponds. pub diff_base_byte_range: Range, } -impl sum_tree::Item for DiffHunk { +/// We store [`InternalDiffHunk`]s internally so we don't need to store the additional row range. 
+#[derive(Debug, Clone)] +struct InternalDiffHunk { + buffer_range: Range, + diff_base_byte_range: Range, +} + +impl sum_tree::Item for InternalDiffHunk { type Summary = DiffHunkSummary; fn summary(&self) -> Self::Summary { DiffHunkSummary { - buffer_range: self.associated_range.clone(), + buffer_range: self.buffer_range.clone(), } } } @@ -64,7 +65,7 @@ impl sum_tree::Summary for DiffHunkSummary { #[derive(Debug, Clone)] pub struct BufferDiff { last_buffer_version: Option, - tree: SumTree>, + tree: SumTree, } impl BufferDiff { @@ -79,11 +80,12 @@ impl BufferDiff { self.tree.is_empty() } + #[cfg(any(test, feature = "test-support"))] pub fn hunks_in_row_range<'a>( &'a self, range: Range, buffer: &'a BufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator { let start = buffer.anchor_before(Point::new(range.start, 0)); let end = buffer.anchor_after(Point::new(range.end, 0)); @@ -94,7 +96,7 @@ impl BufferDiff { &'a self, range: Range, buffer: &'a BufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator { let mut cursor = self .tree .filter::<_, DiffHunkSummary>(buffer, move |summary| { @@ -109,11 +111,8 @@ impl BufferDiff { }) .flat_map(move |hunk| { [ - ( - &hunk.associated_range.start, - hunk.diff_base_byte_range.start, - ), - (&hunk.associated_range.end, hunk.diff_base_byte_range.end), + (&hunk.buffer_range.start, hunk.diff_base_byte_range.start), + (&hunk.buffer_range.end, hunk.diff_base_byte_range.end), ] .into_iter() }); @@ -129,10 +128,9 @@ impl BufferDiff { } Some(DiffHunk { - associated_range: start_point.row..end_point.row, + row_range: start_point.row..end_point.row, diff_base_byte_range: start_base..end_base, buffer_range: buffer.anchor_before(start_point)..buffer.anchor_after(end_point), - buffer_id: buffer.remote_id(), }) }) } @@ -141,7 +139,7 @@ impl BufferDiff { &'a self, range: Range, buffer: &'a BufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator { let mut cursor = self .tree .filter::<_, 
DiffHunkSummary>(buffer, move |summary| { @@ -154,7 +152,7 @@ impl BufferDiff { cursor.prev(buffer); let hunk = cursor.item()?; - let range = hunk.associated_range.to_point(buffer); + let range = hunk.buffer_range.to_point(buffer); let end_row = if range.end.column > 0 { range.end.row + 1 } else { @@ -162,10 +160,9 @@ impl BufferDiff { }; Some(DiffHunk { - associated_range: range.start.row..end_row, + row_range: range.start.row..end_row, diff_base_byte_range: hunk.diff_base_byte_range.clone(), buffer_range: hunk.buffer_range.clone(), - buffer_id: hunk.buffer_id, }) }) } @@ -196,7 +193,7 @@ impl BufferDiff { } #[cfg(test)] - fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator> { + fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator { let start = text.anchor_before(Point::new(0, 0)); let end = text.anchor_after(Point::new(u32::MAX, u32::MAX)); self.hunks_intersecting_range(start..end, text) @@ -229,7 +226,7 @@ impl BufferDiff { hunk_index: usize, buffer: &text::BufferSnapshot, buffer_row_divergence: &mut i64, - ) -> DiffHunk { + ) -> InternalDiffHunk { let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap(); assert!(line_item_count > 0); @@ -284,11 +281,9 @@ impl BufferDiff { let start = Point::new(buffer_row_range.start, 0); let end = Point::new(buffer_row_range.end, 0); let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end); - DiffHunk { - associated_range: buffer_range.clone(), + InternalDiffHunk { buffer_range, diff_base_byte_range, - buffer_id: buffer.remote_id(), } } } @@ -302,17 +297,16 @@ pub fn assert_hunks( diff_base: &str, expected_hunks: &[(Range, &str, &str)], ) where - Iter: Iterator>, + Iter: Iterator, { let actual_hunks = diff_hunks .map(|hunk| { ( - hunk.associated_range.clone(), + hunk.row_range.clone(), &diff_base[hunk.diff_base_byte_range], buffer .text_for_range( - Point::new(hunk.associated_range.start, 0) - ..Point::new(hunk.associated_range.end, 0), + 
Point::new(hunk.row_range.start, 0)..Point::new(hunk.row_range.end, 0), ) .collect::(), ) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index acb57273e30eed..5735ee961651ab 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -21,8 +21,8 @@ use async_watch as watch; pub use clock::ReplicaId; use futures::channel::oneshot; use gpui::{ - AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Pixels, Task, TaskLabel, - WindowContext, + AnyElement, AppContext, Context as _, EventEmitter, HighlightStyle, Model, ModelContext, + Pixels, Task, TaskLabel, WindowContext, }; use lsp::LanguageServerId; use parking_lot::Mutex; @@ -84,11 +84,17 @@ pub enum Capability { pub type BufferRow = u32; +#[derive(Clone)] +enum BufferDiffBase { + Git(Rope), + PastBufferVersion(Model, BufferSnapshot), +} + /// An in-memory representation of a source code file, including its text, /// syntax trees, git status, and diagnostics. pub struct Buffer { text: TextBuffer, - diff_base: Option, + diff_base: Option, git_diff: git::diff::BufferDiff, file: Option>, /// The mtime of the file when this buffer was last loaded from @@ -121,6 +127,7 @@ pub struct Buffer { /// Memoize calls to has_changes_since(saved_version). /// The contents of a cell are (self.version, has_changes) at the time of a last call. has_unsaved_edits: Cell<(clock::Global, bool)>, + _subscriptions: Vec, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -308,7 +315,10 @@ pub enum Operation { pub enum BufferEvent { /// The buffer was changed in a way that must be /// propagated to its other replicas. - Operation(Operation), + Operation { + operation: Operation, + is_local: bool, + }, /// The buffer was edited. Edited, /// The buffer's `dirty` bit changed. 
@@ -644,7 +654,7 @@ impl Buffer { id: self.remote_id().into(), file: self.file.as_ref().map(|f| f.to_proto(cx)), base_text: self.base_text().to_string(), - diff_base: self.diff_base.as_ref().map(|h| h.to_string()), + diff_base: self.diff_base().as_ref().map(|h| h.to_string()), line_ending: proto::serialize_line_ending(self.line_ending()) as i32, saved_version: proto::serialize_version(&self.saved_version), saved_mtime: self.saved_mtime.map(|time| time.into()), @@ -734,12 +744,10 @@ impl Buffer { was_dirty_before_starting_transaction: None, has_unsaved_edits: Cell::new((buffer.version(), false)), text: buffer, - diff_base: diff_base - .map(|mut raw_diff_base| { - LineEnding::normalize(&mut raw_diff_base); - raw_diff_base - }) - .map(Rope::from), + diff_base: diff_base.map(|mut raw_diff_base| { + LineEnding::normalize(&mut raw_diff_base); + BufferDiffBase::Git(Rope::from(raw_diff_base)) + }), diff_base_version: 0, git_diff, file, @@ -759,6 +767,7 @@ impl Buffer { completion_triggers_timestamp: Default::default(), deferred_ops: OperationQueue::new(), has_conflict: false, + _subscriptions: Vec::new(), } } @@ -782,6 +791,52 @@ impl Buffer { } } + pub fn branch(&mut self, cx: &mut ModelContext) -> Model { + let this = cx.handle(); + cx.new_model(|cx| { + let mut branch = Self { + diff_base: Some(BufferDiffBase::PastBufferVersion( + this.clone(), + self.snapshot(), + )), + language: self.language.clone(), + has_conflict: self.has_conflict, + has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()), + _subscriptions: vec![cx.subscribe(&this, |branch: &mut Self, _, event, cx| { + if let BufferEvent::Operation { operation, .. 
} = event { + branch.apply_ops([operation.clone()], cx); + branch.diff_base_version += 1; + } + })], + ..Self::build( + self.text.branch(), + None, + self.file.clone(), + self.capability(), + ) + }; + if let Some(language_registry) = self.language_registry() { + branch.set_language_registry(language_registry); + } + + branch + }) + } + + pub fn merge(&mut self, branch: &Model, cx: &mut ModelContext) { + let branch = branch.read(cx); + let edits = branch + .edits_since::(&self.version) + .map(|edit| { + ( + edit.old, + branch.text_for_range(edit.new).collect::(), + ) + }) + .collect::>(); + self.edit(edits, None, cx); + } + #[cfg(test)] pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot { &self.text @@ -961,20 +1016,23 @@ impl Buffer { /// Returns the current diff base, see [Buffer::set_diff_base]. pub fn diff_base(&self) -> Option<&Rope> { - self.diff_base.as_ref() + match self.diff_base.as_ref()? { + BufferDiffBase::Git(rope) => Some(rope), + BufferDiffBase::PastBufferVersion(_, buffer_snapshot) => { + Some(buffer_snapshot.as_rope()) + } + } } /// Sets the text that will be used to compute a Git diff /// against the buffer text. pub fn set_diff_base(&mut self, diff_base: Option, cx: &mut ModelContext) { - self.diff_base = diff_base - .map(|mut raw_diff_base| { - LineEnding::normalize(&mut raw_diff_base); - raw_diff_base - }) - .map(Rope::from); + self.diff_base = diff_base.map(|mut raw_diff_base| { + LineEnding::normalize(&mut raw_diff_base); + BufferDiffBase::Git(Rope::from(raw_diff_base)) + }); self.diff_base_version += 1; - if let Some(recalc_task) = self.git_diff_recalc(cx) { + if let Some(recalc_task) = self.recalculate_diff(cx) { cx.spawn(|buffer, mut cx| async move { recalc_task.await; buffer @@ -992,14 +1050,21 @@ impl Buffer { self.diff_base_version } - /// Recomputes the Git diff status. - pub fn git_diff_recalc(&mut self, cx: &mut ModelContext) -> Option> { - let diff_base = self.diff_base.clone()?; + /// Recomputes the diff. 
+ pub fn recalculate_diff(&mut self, cx: &mut ModelContext) -> Option> { + let diff_base_rope = match self.diff_base.as_mut()? { + BufferDiffBase::Git(rope) => rope.clone(), + BufferDiffBase::PastBufferVersion(base_buffer, base_buffer_snapshot) => { + let new_base_snapshot = base_buffer.read(cx).snapshot(); + *base_buffer_snapshot = new_base_snapshot; + base_buffer_snapshot.as_rope().clone() + } + }; let snapshot = self.snapshot(); let mut diff = self.git_diff.clone(); let diff = cx.background_executor().spawn(async move { - diff.update(&diff_base, &snapshot).await; + diff.update(&diff_base_rope, &snapshot).await; diff }); @@ -1169,7 +1234,7 @@ impl Buffer { lamport_timestamp, }; self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx); - self.send_operation(op, cx); + self.send_operation(op, true, cx); } fn request_autoindent(&mut self, cx: &mut ModelContext) { @@ -1743,6 +1808,7 @@ impl Buffer { lamport_timestamp, cursor_shape, }, + true, cx, ); self.non_text_state_update_count += 1; @@ -1889,7 +1955,7 @@ impl Buffer { } self.end_transaction(cx); - self.send_operation(Operation::Buffer(edit_operation), cx); + self.send_operation(Operation::Buffer(edit_operation), true, cx); Some(edit_id) } @@ -1991,6 +2057,9 @@ impl Buffer { } }) .collect::>(); + for operation in buffer_ops.iter() { + self.send_operation(Operation::Buffer(operation.clone()), false, cx); + } self.text.apply_ops(buffer_ops); self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops(cx); @@ -2114,8 +2183,16 @@ impl Buffer { } } - fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { - cx.emit(BufferEvent::Operation(operation)); + fn send_operation( + &mut self, + operation: Operation, + is_local: bool, + cx: &mut ModelContext, + ) { + cx.emit(BufferEvent::Operation { + operation, + is_local, + }); } /// Removes the selections for a given peer. 
@@ -2130,7 +2207,7 @@ impl Buffer { let old_version = self.version.clone(); if let Some((transaction_id, operation)) = self.text.undo() { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); self.did_edit(&old_version, was_dirty, cx); Some(transaction_id) } else { @@ -2147,7 +2224,7 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); if let Some(operation) = self.text.undo_transaction(transaction_id) { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); self.did_edit(&old_version, was_dirty, cx); true } else { @@ -2167,7 +2244,7 @@ impl Buffer { let operations = self.text.undo_to_transaction(transaction_id); let undone = !operations.is_empty(); for operation in operations { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); } if undone { self.did_edit(&old_version, was_dirty, cx) @@ -2181,7 +2258,7 @@ impl Buffer { let old_version = self.version.clone(); if let Some((transaction_id, operation)) = self.text.redo() { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); self.did_edit(&old_version, was_dirty, cx); Some(transaction_id) } else { @@ -2201,7 +2278,7 @@ impl Buffer { let operations = self.text.redo_to_transaction(transaction_id); let redone = !operations.is_empty(); for operation in operations { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); } if redone { self.did_edit(&old_version, was_dirty, cx) @@ -2218,6 +2295,7 @@ impl Buffer { triggers, lamport_timestamp: self.completion_triggers_timestamp, }, + true, cx, ); cx.notify(); @@ -2297,7 +2375,7 @@ impl Buffer { let ops = self.text.randomly_undo_redo(rng); if !ops.is_empty() { for op in ops { - 
self.send_operation(Operation::Buffer(op), cx); + self.send_operation(Operation::Buffer(op), true, cx); self.did_edit(&old_version, was_dirty, cx); } } @@ -3638,12 +3716,12 @@ impl BufferSnapshot { !self.git_diff.is_empty() } - /// Returns all the Git diff hunks intersecting the given - /// row range. + /// Returns all the Git diff hunks intersecting the given row range. + #[cfg(any(test, feature = "test-support"))] pub fn git_diff_hunks_in_row_range( &self, range: Range, - ) -> impl '_ + Iterator> { + ) -> impl '_ + Iterator { self.git_diff.hunks_in_row_range(range, self) } @@ -3652,7 +3730,7 @@ impl BufferSnapshot { pub fn git_diff_hunks_intersecting_range( &self, range: Range, - ) -> impl '_ + Iterator> { + ) -> impl '_ + Iterator { self.git_diff.hunks_intersecting_range(range, self) } @@ -3661,7 +3739,7 @@ impl BufferSnapshot { pub fn git_diff_hunks_intersecting_range_rev( &self, range: Range, - ) -> impl '_ + Iterator> { + ) -> impl '_ + Iterator { self.git_diff.hunks_intersecting_range_rev(range, self) } diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 23faa33316da79..1335a94dd0313f 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -6,6 +6,7 @@ use crate::Buffer; use clock::ReplicaId; use collections::BTreeMap; use futures::FutureExt as _; +use git::diff::assert_hunks; use gpui::{AppContext, BorrowAppContext, Model}; use gpui::{Context, TestAppContext}; use indoc::indoc; @@ -275,13 +276,19 @@ fn test_edit_events(cx: &mut gpui::AppContext) { |buffer, cx| { let buffer_1_events = buffer_1_events.clone(); cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() { - BufferEvent::Operation(op) => buffer1_ops.lock().push(op), + BufferEvent::Operation { + operation, + is_local: true, + } => buffer1_ops.lock().push(operation), event => buffer_1_events.lock().push(event), }) .detach(); let buffer_2_events = buffer_2_events.clone(); - cx.subscribe(&buffer2, move |_, _, event, 
_| { - buffer_2_events.lock().push(event.clone()) + cx.subscribe(&buffer2, move |_, _, event, _| match event.clone() { + BufferEvent::Operation { + is_local: false, .. + } => {} + event => buffer_2_events.lock().push(event), }) .detach(); @@ -2370,6 +2377,118 @@ async fn test_find_matching_indent(cx: &mut TestAppContext) { ); } +#[gpui::test] +fn test_branch_and_merge(cx: &mut TestAppContext) { + cx.update(|cx| init_settings(cx, |_| {})); + + let base_buffer = cx.new_model(|cx| Buffer::local("one\ntwo\nthree\n", cx)); + + // Create a remote replica of the base buffer. + let base_buffer_replica = cx.new_model(|cx| { + Buffer::from_proto( + 1, + Capability::ReadWrite, + base_buffer.read(cx).to_proto(cx), + None, + ) + .unwrap() + }); + base_buffer.update(cx, |_buffer, cx| { + cx.subscribe(&base_buffer_replica, |this, _, event, cx| { + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { + this.apply_ops([operation.clone()], cx); + } + }) + .detach(); + }); + + // Create a branch, which initially has the same state as the base buffer. + let branch_buffer = base_buffer.update(cx, |buffer, cx| buffer.branch(cx)); + branch_buffer.read_with(cx, |buffer, _| { + assert_eq!(buffer.text(), "one\ntwo\nthree\n"); + }); + + // Edits to the branch are not applied to the base. + branch_buffer.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(1, 0)..Point::new(1, 0), "ONE_POINT_FIVE\n")], + None, + cx, + ) + }); + branch_buffer.read_with(cx, |branch_buffer, cx| { + assert_eq!(base_buffer.read(cx).text(), "one\ntwo\nthree\n"); + assert_eq!(branch_buffer.text(), "one\nONE_POINT_FIVE\ntwo\nthree\n"); + }); + + // Edits to the base are applied to the branch. 
+ base_buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "ZERO\n")], None, cx) + }); + branch_buffer.read_with(cx, |branch_buffer, cx| { + assert_eq!(base_buffer.read(cx).text(), "ZERO\none\ntwo\nthree\n"); + assert_eq!( + branch_buffer.text(), + "ZERO\none\nONE_POINT_FIVE\ntwo\nthree\n" + ); + }); + + assert_diff_hunks(&branch_buffer, cx, &[(2..3, "", "ONE_POINT_FIVE\n")]); + + // Edits to any replica of the base are applied to the branch. + base_buffer_replica.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(2, 0)..Point::new(2, 0), "TWO_POINT_FIVE\n")], + None, + cx, + ) + }); + branch_buffer.read_with(cx, |branch_buffer, cx| { + assert_eq!( + base_buffer.read(cx).text(), + "ZERO\none\ntwo\nTWO_POINT_FIVE\nthree\n" + ); + assert_eq!( + branch_buffer.text(), + "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" + ); + }); + + // Merging the branch applies all of its changes to the base. + base_buffer.update(cx, |base_buffer, cx| { + base_buffer.merge(&branch_buffer, cx); + assert_eq!( + base_buffer.text(), + "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" + ); + }); +} + +fn assert_diff_hunks( + buffer: &Model, + cx: &mut TestAppContext, + expected_hunks: &[(Range, &str, &str)], +) { + buffer + .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) + .detach(); + cx.executor().run_until_parked(); + + buffer.read_with(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + assert_hunks( + snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX), + &snapshot, + &buffer.diff_base().unwrap().to_string(), + expected_hunks, + ); + }); +} + #[gpui::test(iterations = 100)] fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { let min_peers = env::var("MIN_PEERS") @@ -2407,10 +2526,15 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); let network = network.clone(); 
cx.subscribe(&cx.handle(), move |buffer, _, event, _| { - if let BufferEvent::Operation(op) = event { - network - .lock() - .broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]); + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { + network.lock().broadcast( + buffer.replica_id(), + vec![proto::serialize_operation(operation)], + ); } }) .detach(); @@ -2533,10 +2657,14 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); let network = network.clone(); cx.subscribe(&cx.handle(), move |buffer, _, event, _| { - if let BufferEvent::Operation(op) = event { + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { network.lock().broadcast( buffer.replica_id(), - vec![proto::serialize_operation(op)], + vec![proto::serialize_operation(operation)], ); } }) diff --git a/crates/multi_buffer/Cargo.toml b/crates/multi_buffer/Cargo.toml index acd0c89f8ee4ad..444fe3c75c6cfb 100644 --- a/crates/multi_buffer/Cargo.toml +++ b/crates/multi_buffer/Cargo.toml @@ -27,7 +27,6 @@ collections.workspace = true ctor.workspace = true env_logger.workspace = true futures.workspace = true -git.workspace = true gpui.workspace = true itertools.workspace = true language.workspace = true diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index f6a61f562a71fc..d406f9bfaf6ac2 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5,7 +5,6 @@ use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; use futures::{channel::mpsc, SinkExt}; -use git::diff::DiffHunk; use gpui::{AppContext, EntityId, EventEmitter, Model, ModelContext}; use itertools::Itertools; use language::{ @@ -110,6 +109,19 @@ pub enum Event { DiagnosticsUpdated, } +/// A diff hunk, representing a range of consequent lines in a 
multibuffer. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct MultiBufferDiffHunk { + /// The row range in the multibuffer where this diff hunk appears. + pub row_range: Range, + /// The buffer ID that this hunk belongs to. + pub buffer_id: BufferId, + /// The range of the underlying buffer that this hunk corresponds to. + pub buffer_range: Range, + /// The range within the buffer's diff base that this hunk corresponds to. + pub diff_base_byte_range: Range, +} + pub type MultiBufferPoint = Point; #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq, serde::Deserialize)] @@ -1711,7 +1723,7 @@ impl MultiBuffer { } // - language::BufferEvent::Operation(_) => return, + language::BufferEvent::Operation { .. } => return, }); } @@ -3561,7 +3573,7 @@ impl MultiBufferSnapshot { pub fn git_diff_hunks_in_range_rev( &self, row_range: Range, - ) -> impl Iterator> + '_ { + ) -> impl Iterator + '_ { let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.end.0, 0), Bias::Left, &()); @@ -3599,22 +3611,19 @@ impl MultiBufferSnapshot { .git_diff_hunks_intersecting_range_rev(buffer_start..buffer_end) .map(move |hunk| { let start = multibuffer_start.row - + hunk - .associated_range - .start - .saturating_sub(excerpt_start_point.row); + + hunk.row_range.start.saturating_sub(excerpt_start_point.row); let end = multibuffer_start.row + hunk - .associated_range + .row_range .end .min(excerpt_end_point.row + 1) .saturating_sub(excerpt_start_point.row); - DiffHunk { - associated_range: MultiBufferRow(start)..MultiBufferRow(end), + MultiBufferDiffHunk { + row_range: MultiBufferRow(start)..MultiBufferRow(end), diff_base_byte_range: hunk.diff_base_byte_range.clone(), buffer_range: hunk.buffer_range.clone(), - buffer_id: hunk.buffer_id, + buffer_id: excerpt.buffer_id, } }); @@ -3628,7 +3637,7 @@ impl MultiBufferSnapshot { pub fn git_diff_hunks_in_range( &self, row_range: Range, - ) -> impl Iterator> + '_ { + ) -> impl Iterator + '_ { let mut cursor = 
self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.start.0, 0), Bias::Left, &()); @@ -3673,23 +3682,20 @@ impl MultiBufferSnapshot { MultiBufferRow(0)..MultiBufferRow(1) } else { let start = multibuffer_start.row - + hunk - .associated_range - .start - .saturating_sub(excerpt_rows.start); + + hunk.row_range.start.saturating_sub(excerpt_rows.start); let end = multibuffer_start.row + hunk - .associated_range + .row_range .end .min(excerpt_rows.end + 1) .saturating_sub(excerpt_rows.start); MultiBufferRow(start)..MultiBufferRow(end) }; - DiffHunk { - associated_range: buffer_range, + MultiBufferDiffHunk { + row_range: buffer_range, diff_base_byte_range: hunk.diff_base_byte_range.clone(), buffer_range: hunk.buffer_range.clone(), - buffer_id: hunk.buffer_id, + buffer_id: excerpt.buffer_id, } }); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 435c1430243705..bd9c17ecb29b99 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2182,7 +2182,10 @@ impl Project { let buffer_id = buffer.read(cx).remote_id(); match event { - BufferEvent::Operation(operation) => { + BufferEvent::Operation { + operation, + is_local: true, + } => { let operation = language::proto::serialize_operation(operation); if let Some(ssh) = &self.ssh_session { @@ -2267,7 +2270,7 @@ impl Project { .filter_map(|buffer| { let buffer = buffer.upgrade()?; buffer - .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx)) + .update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx)) .ok() .flatten() }) diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 72a38ccba7d781..d0d67f0cda4a40 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -3288,7 +3288,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { cx.subscribe(&buffer1, { let events = events.clone(); move |_, _, event, _| match event { - BufferEvent::Operation(_) => {} + 
BufferEvent::Operation { .. } => {} _ => events.lock().push(event.clone()), } }) diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 54f48e36269baf..9d5c26d6c7ce1a 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -146,12 +146,15 @@ impl HeadlessProject { cx: &mut ModelContext, ) { match event { - BufferEvent::Operation(op) => cx + BufferEvent::Operation { + operation, + is_local: true, + } => cx .background_executor() .spawn(self.session.request(proto::UpdateBuffer { project_id: SSH_PROJECT_ID, buffer_id: buffer.read(cx).remote_id().to_proto(), - operations: vec![serialize_operation(op)], + operations: vec![serialize_operation(operation)], })) .detach(), _ => {} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 8d2cd97aacaaee..8bdc9fdb03d89b 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -13,6 +13,7 @@ mod undo_map; pub use anchor::*; use anyhow::{anyhow, Context as _, Result}; pub use clock::ReplicaId; +use clock::LOCAL_BRANCH_REPLICA_ID; use collections::{HashMap, HashSet}; use locator::Locator; use operation_queue::OperationQueue; @@ -715,6 +716,19 @@ impl Buffer { self.snapshot.clone() } + pub fn branch(&self) -> Self { + Self { + snapshot: self.snapshot.clone(), + history: History::new(self.base_text().clone()), + deferred_ops: OperationQueue::new(), + deferred_replicas: HashSet::default(), + lamport_clock: clock::Lamport::new(LOCAL_BRANCH_REPLICA_ID), + subscriptions: Default::default(), + edit_id_resolvers: Default::default(), + wait_for_version_txs: Default::default(), + } + } + pub fn replica_id(&self) -> ReplicaId { self.lamport_clock.replica_id } From 4f227fd3bf19fe7393d278545edfa06343dc5958 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 20 Sep 2024 18:51:34 -0600 Subject: [PATCH 258/762] Use LanguageServerName in more places (#18167) This pushes the new LanguageServerName type to more 
places. As both languages and language servers were identified by Arc, it was sometimes hard to tell which was intended. Release Notes: - N/A --- .../src/activity_indicator.rs | 7 ++- .../remote_editing_collaboration_tests.rs | 2 +- .../src/wasm_host/wit/since_v0_1_0.rs | 4 +- .../src/wasm_host/wit/since_v0_2_0.rs | 4 +- crates/gpui/src/shared_string.rs | 7 +++ crates/language/src/language.rs | 57 ++++++++++++++++--- crates/language/src/language_settings.rs | 32 ++++++----- crates/language_tools/src/lsp_log.rs | 2 +- crates/languages/src/c.rs | 7 ++- crates/languages/src/go.rs | 7 ++- crates/languages/src/python.rs | 21 ++++--- crates/languages/src/rust.rs | 8 +-- crates/languages/src/tailwind.rs | 9 +-- crates/languages/src/typescript.rs | 13 +++-- crates/languages/src/vtsls.rs | 8 +-- crates/languages/src/yaml.rs | 8 +-- crates/project/src/lsp_store.rs | 24 ++++---- crates/project/src/prettier_store.rs | 4 +- crates/project/src/project_settings.rs | 3 +- .../remote_server/src/remote_editing_tests.rs | 8 +-- 20 files changed, 150 insertions(+), 85 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 3f567c9e802b80..a9ae7d075d10c2 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -19,7 +19,10 @@ use workspace::{item::ItemHandle, StatusItemView, Workspace}; actions!(activity_indicator, [ShowErrorMessage]); pub enum Event { - ShowError { lsp_name: Arc, error: String }, + ShowError { + lsp_name: LanguageServerName, + error: String, + }, } pub struct ActivityIndicator { @@ -123,7 +126,7 @@ impl ActivityIndicator { self.statuses.retain(|status| { if let LanguageServerBinaryStatus::Failed { error } = &status.status { cx.emit(Event::ShowError { - lsp_name: status.name.0.clone(), + lsp_name: status.name.clone(), error: error.clone(), }); false diff --git 
a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index c4410fd776be7d..cdcf69cf7e9ace 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -102,7 +102,7 @@ async fn test_sharing_an_ssh_remote_project( all_language_settings(file, cx) .language(Some(&("Rust".into()))) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); diff --git a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs index 50547b6371c697..3835f58f885290 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs @@ -9,10 +9,10 @@ use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; use isahc::config::{Configurable, RedirectPolicy}; -use language::LanguageName; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; +use language::{LanguageName, LanguageServerName}; use project::project_settings::ProjectSettings; use semantic_version::SemanticVersion; use std::{ @@ -366,7 +366,7 @@ impl ExtensionImports for WasmState { .and_then(|key| { ProjectSettings::get(location, cx) .lsp - .get(&Arc::::from(key)) + .get(&LanguageServerName(key.into())) }) .cloned() .unwrap_or_default(); diff --git a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs index 7fa79c2544475b..eb6e1a09a2ae99 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs @@ -9,10 +9,10 @@ use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; use isahc::config::{Configurable, RedirectPolicy}; -use 
language::LanguageName; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; +use language::{LanguageName, LanguageServerName}; use project::project_settings::ProjectSettings; use semantic_version::SemanticVersion; use std::{ @@ -412,7 +412,7 @@ impl ExtensionImports for WasmState { .and_then(|key| { ProjectSettings::get(location, cx) .lsp - .get(&Arc::::from(key)) + .get(&LanguageServerName::from_proto(key)) }) .cloned() .unwrap_or_default(); diff --git a/crates/gpui/src/shared_string.rs b/crates/gpui/src/shared_string.rs index a4ed36ec211176..f5aef6adf80be6 100644 --- a/crates/gpui/src/shared_string.rs +++ b/crates/gpui/src/shared_string.rs @@ -9,6 +9,13 @@ use util::arc_cow::ArcCow; #[derive(Deref, DerefMut, Eq, PartialEq, PartialOrd, Ord, Hash, Clone)] pub struct SharedString(ArcCow<'static, str>); +impl SharedString { + /// creates a static SharedString + pub const fn new_static(s: &'static str) -> Self { + Self(ArcCow::Borrowed(s)) + } +} + impl Default for SharedString { fn default() -> Self { Self(ArcCow::Owned(Arc::default())) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 309a67a1a96a41..29a7ac1860b0c5 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -139,11 +139,52 @@ pub trait ToLspPosition { /// A name of a language server. 
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] -pub struct LanguageServerName(pub Arc); +pub struct LanguageServerName(pub SharedString); +impl std::fmt::Display for LanguageServerName { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(&self.0, f) + } +} + +impl AsRef for LanguageServerName { + fn as_ref(&self) -> &str { + self.0.as_ref() + } +} + +impl AsRef for LanguageServerName { + fn as_ref(&self) -> &OsStr { + self.0.as_ref().as_ref() + } +} + +impl JsonSchema for LanguageServerName { + fn schema_name() -> String { + "LanguageServerName".into() + } + + fn json_schema(_: &mut SchemaGenerator) -> Schema { + SchemaObject { + instance_type: Some(InstanceType::String.into()), + ..Default::default() + } + .into() + } +} impl LanguageServerName { + pub const fn new_static(s: &'static str) -> Self { + Self(SharedString::new_static(s)) + } + pub fn from_proto(s: String) -> Self { - Self(Arc::from(s)) + Self(s.into()) + } +} + +impl<'a> From<&'a str> for LanguageServerName { + fn from(str: &'a str) -> LanguageServerName { + LanguageServerName(str.to_string().into()) } } @@ -202,8 +243,8 @@ impl CachedLspAdapter { }) } - pub fn name(&self) -> Arc { - self.adapter.name().0.clone() + pub fn name(&self) -> LanguageServerName { + self.adapter.name().clone() } pub async fn get_language_server_command( @@ -594,7 +635,7 @@ pub struct LanguageConfig { pub block_comment: Option<(Arc, Arc)>, /// A list of language servers that are allowed to run on subranges of a given language. 
#[serde(default)] - pub scope_opt_in_language_servers: Vec, + pub scope_opt_in_language_servers: Vec, #[serde(default)] pub overrides: HashMap, /// A list of characters that Zed should treat as word characters for the @@ -658,7 +699,7 @@ pub struct LanguageConfigOverride { #[serde(default)] pub word_characters: Override>, #[serde(default)] - pub opt_into_language_servers: Vec, + pub opt_into_language_servers: Vec, } #[derive(Clone, Deserialize, Debug, Serialize, JsonSchema)] @@ -1479,9 +1520,9 @@ impl LanguageScope { pub fn language_allowed(&self, name: &LanguageServerName) -> bool { let config = &self.language.config; let opt_in_servers = &config.scope_opt_in_language_servers; - if opt_in_servers.iter().any(|o| *o == *name.0) { + if opt_in_servers.iter().any(|o| *o == *name) { if let Some(over) = self.config_override() { - over.opt_into_language_servers.iter().any(|o| *o == *name.0) + over.opt_into_language_servers.iter().any(|o| *o == *name) } else { false } diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 6121cb6a39a2ca..82d4208aae6eb7 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -99,7 +99,7 @@ pub struct LanguageSettings { /// special tokens: /// - `"!"` - A language server ID prefixed with a `!` will be disabled. /// - `"..."` - A placeholder to refer to the **rest** of the registered language servers for this language. - pub language_servers: Vec>, + pub language_servers: Vec, /// Controls whether inline completions are shown immediately (true) /// or manually by triggering `editor::ShowInlineCompletion` (false). 
pub show_inline_completions: bool, @@ -137,22 +137,24 @@ impl LanguageSettings { } pub(crate) fn resolve_language_servers( - configured_language_servers: &[Arc], + configured_language_servers: &[String], available_language_servers: &[LanguageServerName], ) -> Vec { - let (disabled_language_servers, enabled_language_servers): (Vec>, Vec>) = - configured_language_servers.iter().partition_map( - |language_server| match language_server.strip_prefix('!') { - Some(disabled) => Either::Left(disabled.into()), - None => Either::Right(language_server.clone()), - }, - ); + let (disabled_language_servers, enabled_language_servers): ( + Vec, + Vec, + ) = configured_language_servers.iter().partition_map( + |language_server| match language_server.strip_prefix('!') { + Some(disabled) => Either::Left(LanguageServerName(disabled.to_string().into())), + None => Either::Right(LanguageServerName(language_server.clone().into())), + }, + ); let rest = available_language_servers .iter() .filter(|&available_language_server| { - !disabled_language_servers.contains(&available_language_server.0) - && !enabled_language_servers.contains(&available_language_server.0) + !disabled_language_servers.contains(&available_language_server) + && !enabled_language_servers.contains(&available_language_server) }) .cloned() .collect::>(); @@ -160,10 +162,10 @@ impl LanguageSettings { enabled_language_servers .into_iter() .flat_map(|language_server| { - if language_server.as_ref() == Self::REST_OF_LANGUAGE_SERVERS { + if language_server.0.as_ref() == Self::REST_OF_LANGUAGE_SERVERS { rest.clone() } else { - vec![LanguageServerName(language_server.clone())] + vec![language_server.clone()] } }) .collect::>() @@ -295,7 +297,7 @@ pub struct LanguageSettingsContent { /// /// Default: ["..."] #[serde(default)] - pub language_servers: Option>>, + pub language_servers: Option>, /// Controls whether inline completions are shown immediately (true) /// or manually by triggering `editor::ShowInlineCompletion` (false). 
/// @@ -1165,7 +1167,7 @@ mod tests { names .iter() .copied() - .map(|name| LanguageServerName(name.into())) + .map(|name| LanguageServerName(name.to_string().into())) .collect::>() } diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index 53def5eb2a1195..bde5fe9b199e8d 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -236,7 +236,7 @@ impl LogStore { )); this.add_language_server( LanguageServerKind::Global { - name: LanguageServerName(Arc::from("copilot")), + name: LanguageServerName::new_static("copilot"), }, server.server_id(), Some(server.clone()), diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index 4ebb4569ef1406..8a04e0aae6f4ef 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -13,13 +13,13 @@ use util::{fs::remove_matching, maybe, ResultExt}; pub struct CLspAdapter; impl CLspAdapter { - const SERVER_NAME: &'static str = "clangd"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("clangd"); } #[async_trait(?Send)] impl super::LspAdapter for CLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -28,7 +28,8 @@ impl super::LspAdapter for CLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) + language_server_settings(delegate, &Self::SERVER_NAME, cx) + .and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index a103c4783cffad..a1a996c066ee4b 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -33,7 +33,7 @@ fn server_binary_arguments() -> Vec { pub struct GoLspAdapter; impl GoLspAdapter { - const SERVER_NAME: &'static str = "gopls"; + const SERVER_NAME: LanguageServerName = 
LanguageServerName::new_static("gopls"); } static GOPLS_VERSION_REGEX: LazyLock = @@ -46,7 +46,7 @@ static GO_ESCAPE_SUBTEST_NAME_REGEX: LazyLock = LazyLock::new(|| { #[async_trait(?Send)] impl super::LspAdapter for GoLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -71,7 +71,8 @@ impl super::LspAdapter for GoLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) + language_server_settings(delegate, &Self::SERVER_NAME, cx) + .and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index ee127c00cca846..0dce8fb6617616 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -30,7 +30,7 @@ pub struct PythonLspAdapter { } impl PythonLspAdapter { - const SERVER_NAME: &'static str = "pyright"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("pyright"); pub fn new(node: Arc) -> Self { PythonLspAdapter { node } @@ -40,7 +40,7 @@ impl PythonLspAdapter { #[async_trait(?Send)] impl LspAdapter for PythonLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -49,7 +49,7 @@ impl LspAdapter for PythonLspAdapter { ) -> Result> { Ok(Box::new( self.node - .npm_package_latest_version(Self::SERVER_NAME) + .npm_package_latest_version(Self::SERVER_NAME.as_ref()) .await?, ) as Box<_>) } @@ -62,16 +62,23 @@ impl LspAdapter for PythonLspAdapter { ) -> Result { let latest_version = latest_version.downcast::().unwrap(); let server_path = container_dir.join(SERVER_PATH); - let package_name = Self::SERVER_NAME; let should_install_language_server = self .node - .should_install_npm_package(package_name, 
&server_path, &container_dir, &latest_version) + .should_install_npm_package( + Self::SERVER_NAME.as_ref(), + &server_path, + &container_dir, + &latest_version, + ) .await; if should_install_language_server { self.node - .npm_install_packages(&container_dir, &[(package_name, latest_version.as_str())]) + .npm_install_packages( + &container_dir, + &[(Self::SERVER_NAME.as_ref(), latest_version.as_str())], + ) .await?; } @@ -182,7 +189,7 @@ impl LspAdapter for PythonLspAdapter { cx: &mut AsyncAppContext, ) -> Result { cx.update(|cx| { - language_server_settings(adapter.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() }) diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index a32ffe50f519f1..eebd573a7e25fa 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -25,13 +25,13 @@ use util::{fs::remove_matching, maybe, ResultExt}; pub struct RustLspAdapter; impl RustLspAdapter { - const SERVER_NAME: &'static str = "rust-analyzer"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("rust-analyzer"); } #[async_trait(?Send)] impl LspAdapter for RustLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -41,7 +41,7 @@ impl LspAdapter for RustLspAdapter { ) -> Option { let configured_binary = cx .update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx) + language_server_settings(delegate, &Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()?; @@ -60,7 +60,7 @@ impl LspAdapter for RustLspAdapter { path_lookup: None, .. 
}) => { - let path = delegate.which(Self::SERVER_NAME.as_ref()).await; + let path = delegate.which("rust-analyzer".as_ref()).await; let env = delegate.shell_env().await; if let Some(path) = path { diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 9a053dbd8739ce..e3e17a8fa72eb4 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -32,7 +32,8 @@ pub struct TailwindLspAdapter { } impl TailwindLspAdapter { - const SERVER_NAME: &'static str = "tailwindcss-language-server"; + const SERVER_NAME: LanguageServerName = + LanguageServerName::new_static("tailwindcss-language-server"); pub fn new(node: Arc) -> Self { TailwindLspAdapter { node } @@ -42,7 +43,7 @@ impl TailwindLspAdapter { #[async_trait(?Send)] impl LspAdapter for TailwindLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -52,7 +53,7 @@ impl LspAdapter for TailwindLspAdapter { ) -> Option { let configured_binary = cx .update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx) + language_server_settings(delegate, &Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -152,7 +153,7 @@ impl LspAdapter for TailwindLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let tailwind_user_settings = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index c65b74aa9bb14e..b09216c9703698 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -71,7 +71,8 @@ pub struct TypeScriptLspAdapter { impl TypeScriptLspAdapter { const OLD_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.js"; const 
NEW_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.mjs"; - const SERVER_NAME: &'static str = "typescript-language-server"; + const SERVER_NAME: LanguageServerName = + LanguageServerName::new_static("typescript-language-server"); pub fn new(node: Arc) -> Self { TypeScriptLspAdapter { node } } @@ -97,7 +98,7 @@ struct TypeScriptVersions { #[async_trait(?Send)] impl LspAdapter for TypeScriptLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -239,7 +240,7 @@ impl LspAdapter for TypeScriptLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; if let Some(options) = override_options { @@ -304,7 +305,7 @@ impl EsLintLspAdapter { const GITHUB_ASSET_KIND: AssetKind = AssetKind::Zip; const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js"; - const SERVER_NAME: &'static str = "eslint"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("eslint"); const FLAT_CONFIG_FILE_NAMES: &'static [&'static str] = &["eslint.config.js", "eslint.config.mjs", "eslint.config.cjs"]; @@ -331,7 +332,7 @@ impl LspAdapter for EsLintLspAdapter { let workspace_root = delegate.worktree_root_path(); let eslint_user_settings = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; @@ -403,7 +404,7 @@ impl LspAdapter for EsLintLspAdapter { } fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( diff --git a/crates/languages/src/vtsls.rs 
b/crates/languages/src/vtsls.rs index 9499b5c54fbeba..5ec31213840bb1 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -48,11 +48,11 @@ struct TypeScriptVersions { server_version: String, } -const SERVER_NAME: &str = "vtsls"; +const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("vtsls"); #[async_trait(?Send)] impl LspAdapter for VtslsLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(SERVER_NAME.into()) + SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -74,7 +74,7 @@ impl LspAdapter for VtslsLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - language_server_settings(delegate, SERVER_NAME, cx).and_then(|s| s.binary.clone()) + language_server_settings(delegate, &SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { @@ -267,7 +267,7 @@ impl LspAdapter for VtslsLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - language_server_settings(delegate.as_ref(), SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 06360847acc803..583961f4b1c0e9 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -30,7 +30,7 @@ pub struct YamlLspAdapter { } impl YamlLspAdapter { - const SERVER_NAME: &'static str = "yaml-language-server"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("yaml-language-server"); pub fn new(node: Arc) -> Self { YamlLspAdapter { node } } @@ -39,7 +39,7 @@ impl YamlLspAdapter { #[async_trait(?Send)] impl LspAdapter for YamlLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -49,7 +49,7 @@ impl LspAdapter for YamlLspAdapter { ) -> Option { let configured_binary = cx 
.update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx) + language_server_settings(delegate, &Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -145,7 +145,7 @@ impl LspAdapter for YamlLspAdapter { let mut options = serde_json::json!({"[yaml]": {"editor.tabSize": tab_size}}); let project_options = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; if let Some(override_options) = project_options { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 92f37f87af4056..6a3788c8793161 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -103,7 +103,7 @@ pub struct LocalLspStore { supplementary_language_servers: HashMap)>, prettier_store: Model, - current_lsp_settings: HashMap, LspSettings>, + current_lsp_settings: HashMap, _subscription: gpui::Subscription, } @@ -138,7 +138,7 @@ impl RemoteLspStore {} pub struct SshLspStore { upstream_client: AnyProtoClient, - current_lsp_settings: HashMap, LspSettings>, + current_lsp_settings: HashMap, } #[allow(clippy::large_enum_variant)] @@ -316,8 +316,8 @@ impl LspStore { pub fn swap_current_lsp_settings( &mut self, - new_settings: HashMap, LspSettings>, - ) -> Option, LspSettings>> { + new_settings: HashMap, + ) -> Option> { match &mut self.mode { LspStoreMode::Ssh(SshLspStore { current_lsp_settings, @@ -933,7 +933,7 @@ impl LspStore { if !language_settings(Some(language), file.as_ref(), cx).enable_language_server { language_servers_to_stop.push((worktree_id, started_lsp_name.clone())); } else if let Some(worktree) = worktree { - let server_name = &adapter.name.0; + let server_name = &adapter.name; match ( current_lsp_settings.get(server_name), new_lsp_settings.get(server_name), @@ -4765,7 +4765,7 @@ impl LspStore { let project_id = self.project_id; let worktree_id = 
worktree.read(cx).id().to_proto(); let upstream_client = ssh.upstream_client.clone(); - let name = adapter.name().to_string(); + let name = adapter.name(); let Some(available_language) = self.languages.available_language_for_name(&language) else { log::error!("failed to find available language {language}"); @@ -4783,7 +4783,7 @@ impl LspStore { } }; - let name = adapter.name().to_string(); + let name = adapter.name(); let code_action_kinds = adapter .adapter .code_action_kinds() @@ -4809,7 +4809,7 @@ impl LspStore { .request(proto::CreateLanguageServer { project_id, worktree_id, - name, + name: name.0.to_string(), binary: Some(language_server_command), initialization_options, code_action_kinds, @@ -4892,7 +4892,7 @@ impl LspStore { ); // We need some on the SSH client, and some on SSH host - let lsp = project_settings.lsp.get(&adapter.name.0); + let lsp = project_settings.lsp.get(&adapter.name); let override_options = lsp.and_then(|s| s.initialization_options.clone()); let server_id = pending_server.server_id; @@ -5078,7 +5078,7 @@ impl LspStore { async fn shutdown_language_server( server_state: Option, - name: Arc, + name: LanguageServerName, cx: AsyncAppContext, ) { let server = match server_state { @@ -5123,7 +5123,7 @@ impl LspStore { let key = (worktree_id, adapter_name); if self.mode.is_local() { if let Some(server_id) = self.language_server_ids.remove(&key) { - let name = key.1 .0; + let name = key.1; log::info!("stopping language server {name}"); // Remove other entries for this language server as well @@ -7168,7 +7168,7 @@ impl LspAdapter for SshLspAdapter { } pub fn language_server_settings<'a, 'b: 'a>( delegate: &'a dyn LspAdapterDelegate, - language: &str, + language: &LanguageServerName, cx: &'b AppContext, ) -> Option<&'a LspSettings> { ProjectSettings::get( diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 29101917fb9aa5..75d70c1d3f72f1 100644 --- a/crates/project/src/prettier_store.rs +++ 
b/crates/project/src/prettier_store.rs @@ -338,7 +338,7 @@ impl PrettierStore { prettier_store .update(cx, |prettier_store, cx| { let name = if is_default { - LanguageServerName(Arc::from("prettier (default)")) + LanguageServerName("prettier (default)".to_string().into()) } else { let worktree_path = worktree_id .and_then(|id| { @@ -366,7 +366,7 @@ impl PrettierStore { } None => format!("prettier ({})", prettier_dir.display()), }; - LanguageServerName(Arc::from(name)) + LanguageServerName(name.into()) }; cx.emit(PrettierStoreEvent::LanguageServerAdded { new_server_id, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 9a7c80703c734c..904efe0a6b01f0 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1,6 +1,7 @@ use collections::HashMap; use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, EventEmitter, Model, ModelContext}; +use language::LanguageServerName; use paths::local_settings_file_relative_path; use rpc::{proto, AnyProtoClient, TypedEnvelope}; use schemars::JsonSchema; @@ -27,7 +28,7 @@ pub struct ProjectSettings { /// name to the lsp value. 
/// Default: null #[serde(default)] - pub lsp: HashMap, LspSettings>, + pub lsp: HashMap, /// Configuration for Git-related features #[serde(default)] diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index b7fc56d3c60262..b5ab1c40070a09 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -205,7 +205,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo AllLanguageSettings::get_global(cx) .language(Some(&"Rust".into())) .language_servers, - ["custom-rust-analyzer".into()] + ["custom-rust-analyzer".to_string()] ) }); @@ -264,7 +264,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo ) .language(Some(&"Rust".into())) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); @@ -274,7 +274,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo all_language_settings(file, cx) .language(Some(&"Rust".into())) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); } @@ -357,7 +357,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext all_language_settings(file, cx) .language(Some(&"Rust".into())) .language_servers, - ["rust-analyzer".into()] + ["rust-analyzer".to_string()] ) }); From 3ca18af40b8a7cb83d8303a8131e90ca997f09ca Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sat, 21 Sep 2024 21:01:29 +0900 Subject: [PATCH 259/762] docs: Fix typo in `configuring-zed.md` (#18178) Fix typo in `configuring-zed.md` Release Notes: - N/A --- docs/src/configuring-zed.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index de7433bf5dbad3..7cc6a4a8cb02ce 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -313,10 +313,10 @@ List of 
`string` values "cursor_shape": "block" ``` -3. An underline that runs along the following character: +3. An underscore that runs along the following character: ```json -"cursor_shape": "underline" +"cursor_shape": "underscore" ``` 4. An box drawn around the following character: From 1f35c8d09df9612e23d62a472d49c7021202711d Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 17:47:07 +0900 Subject: [PATCH 260/762] Fix tooltip of `always_treat_brackets_as_autoclosed` (#18191) Fixed a bug where the `always_treat_brackets_as_autoclosed` option would not display the message in the tooltip that appears when hovering. Release Notes: - N/A --- crates/language/src/language_settings.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 82d4208aae6eb7..735a9a60f87fa6 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -325,11 +325,11 @@ pub struct LanguageSettingsContent { /// /// Default: true pub use_auto_surround: Option, - // Controls how the editor handles the autoclosed characters. - // When set to `false`(default), skipping over and auto-removing of the closing characters - // happen only for auto-inserted characters. - // Otherwise(when `true`), the closing characters are always skipped over and auto-removed - // no matter how they were inserted. + /// Controls how the editor handles the autoclosed characters. + /// When set to `false`(default), skipping over and auto-removing of the closing characters + /// happen only for auto-inserted characters. + /// Otherwise(when `true`), the closing characters are always skipped over and auto-removed + /// no matter how they were inserted. 
/// /// Default: false pub always_treat_brackets_as_autoclosed: Option, From e7fcf83ce8d88ca36d2aa7fe8fc017c308aaf138 Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 17:48:52 +0900 Subject: [PATCH 261/762] docs: Fix misordered headings (#18192) 1. Raised the `Indent Guides` heading to level 2, which is completely unrelated to `Git`. 2. the `Git` heading now only contains `Git Gutter` and `Inline Git Blame` as subheadings. 3. The `Indent Guides` heading is now located directly after the `Git` heading. Release Notes: - N/A --- docs/src/configuring-zed.md | 88 ++++++++++++++++++------------------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 7cc6a4a8cb02ce..518dbb7f38a833 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -857,7 +857,50 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files } ``` -### Indent Guides +### Inline Git Blame + +- Description: Whether or not to show git blame information inline, on the currently focused line. +- Setting: `inline_blame` +- Default: + +```json +{ + "git": { + "inline_blame": { + "enabled": true + } + } +} +``` + +**Options** + +1. Disable inline git blame: + +```json +{ + "git": { + "inline_blame": { + "enabled": false + } + } +} +``` + +2. Only show inline git blame after a delay (that starts after cursor stops moving): + +```json +{ + "git": { + "inline_blame": { + "enabled": true, + "delay_ms": 500 + } + } +} +``` + +## Indent Guides - Description: Configuration related to indent guides. Indent guides can be configured separately for each language. - Setting: `indent_guides` @@ -926,49 +969,6 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files } ``` -### Inline Git Blame - -- Description: Whether or not to show git blame information inline, on the currently focused line. 
-- Setting: `inline_blame` -- Default: - -```json -{ - "git": { - "inline_blame": { - "enabled": true - } - } -} -``` - -**Options** - -1. Disable inline git blame: - -```json -{ - "git": { - "inline_blame": { - "enabled": false - } - } -} -``` - -2. Only show inline git blame after a delay (that starts after cursor stops moving): - -```json -{ - "git": { - "inline_blame": { - "enabled": true, - "delay_ms": 500 - } - } -} -``` - ## Hard Tabs - Description: Whether to indent lines using tab characters or multiple spaces. From 37c93d8fead2f33ed444c1ee8efd303a2b5a4c8c Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 18:09:35 +0900 Subject: [PATCH 262/762] docs: Add missing `base_keymap` option in `configuring-zed.md` (#18190) Added `base_keymap`, an option that works in the editor but is missing from the documentation. Release Notes: - N/A --- assets/settings/default.json | 8 ++++-- docs/src/configuring-zed.md | 56 ++++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 3 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 8424c5733d81bc..e04ab90f217cdf 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -15,9 +15,11 @@ // text editor: // // 1. "VSCode" - // 2. "JetBrains" - // 3. "SublimeText" - // 4. "Atom" + // 2. "Atom" + // 3. "JetBrains" + // 4. "None" + // 5. "SublimeText" + // 6. "TextMate" "base_keymap": "VSCode", // Features that can be globally enabled or disabled "features": { diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 518dbb7f38a833..5d9a2843edf4a7 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -93,6 +93,62 @@ Extensions that provide language servers may also provide default settings for t `boolean` values +## Base Keymap + +- Description: Base key bindings scheme. Base keymaps can be overridden with user keymaps. +- Setting: `base_keymap` +- Default: `VSCode` + +**Options** + +1. 
VSCode + +```json +{ + "base_keymap": "VSCode" +} +``` + +2. Atom + +```json +{ + "base_keymap": "Atom" +} +``` + +3. JetBrains + +```json +{ + "base_keymap": "JetBrains" +} +``` + +4. None + +```json +{ + "base_keymap": "None" +} +``` + +5. SublimeText + +```json +{ + "base_keymap": "SublimeText" +} +``` + +6. TextMate + +```json +{ + "base_keymap": "TextMate" +} +``` + ## Buffer Font Family - Description: The name of a font to use for rendering text in the editor. From 0f4ebdfbca721614f3cadafc3b44e4fbf099afda Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 18:15:13 +0900 Subject: [PATCH 263/762] docs: Add missing `ui_font_size` option in `configuring-zed.md` (#18189) Added `ui_font_size`, an option that works in the editor but is missing from the documentation. Release Notes: - N/A --- docs/src/configuring-zed.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 5d9a2843edf4a7..c0aa4c513a5f3d 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -215,7 +215,7 @@ For example, to use `Nerd Font` as a fallback, add the following to your setting **Options** -`integer` values +`integer` values from `6` to `100` pixels (inclusive) ## Buffer Font Weight @@ -2184,6 +2184,16 @@ Float values between `0.0` and `0.9`, where: } ``` +## UI Font Size + +- Description: The default font size for text in the UI. 
+- Setting: `ui_font_size` +- Default: `16` + +**Options** + +`integer` values from `6` to `100` pixels (inclusive) + ## An example configuration: ```json From 75cb199a54666032e7a62dfb64739283556ae96c Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Mon, 23 Sep 2024 00:50:51 +0800 Subject: [PATCH 264/762] project: Fix typo error cause remove worktree not stop lsp (#18198) Release Notes: - N/A --- crates/project/src/project.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index bd9c17ecb29b99..78584cbae0c65f 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2006,7 +2006,7 @@ impl Project { cx.emit(Event::LanguageServerAdded(*language_server_id)) } LspStoreEvent::LanguageServerRemoved(language_server_id) => { - cx.emit(Event::LanguageServerAdded(*language_server_id)) + cx.emit(Event::LanguageServerRemoved(*language_server_id)) } LspStoreEvent::LanguageServerLog(server_id, log_type, string) => cx.emit( Event::LanguageServerLog(*server_id, log_type.clone(), string.clone()), From bb7d9d35256825c9b022a3c89c556c1521664c8d Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Mon, 23 Sep 2024 12:26:01 +0900 Subject: [PATCH 265/762] docs: Remove `default_dock_anchor` in `configuring-zed.md` (#18210) Removed the deprecated option `default_dock_anchor` in `configuring-zed.md` Note: https://zed.dev/blog/new-panel-system Release Notes: - N/A --- docs/src/configuring-zed.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index c0aa4c513a5f3d..7837044a60a669 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -381,12 +381,6 @@ List of `string` values "cursor_shape": "hollow" ``` -## Default Dock Anchor - -- Description: The default anchor for new docks. -- Setting: `default_dock_anchor` -- Default: `bottom` - **Options** 1. 
Position the dock attached to the bottom of the workspace: `bottom` From 05d18321db59539b56520d25f2ee95850ad911fd Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 23 Sep 2024 12:53:57 +0300 Subject: [PATCH 266/762] Resolve completions properly (#18212) Related to https://github.com/rust-lang/rust-analyzer/pull/18167 * Declare more completion item fields in the client completion resolve capabilities * Do resolve completions even if their docs are present * Instead, do not resolve completions that could not be resolved when handling the remote client resolve requests * Do replace the old lsp completion data with the resolved one Release Notes: - Improved completion resolve mechanism --- crates/lsp/src/lsp.rs | 8 ++++++- crates/project/src/lsp_store.rs | 37 ++++++++++++++++++++++++--------- crates/proto/proto/zed.proto | 1 + 3 files changed, 35 insertions(+), 11 deletions(-) diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 21671cd0b13265..c2a5951de72101 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -615,8 +615,14 @@ impl LanguageServer { snippet_support: Some(true), resolve_support: Some(CompletionItemCapabilityResolveSupport { properties: vec![ - "documentation".to_string(), "additionalTextEdits".to_string(), + "command".to_string(), + "detail".to_string(), + "documentation".to_string(), + "filterText".to_string(), + "labelDetails".to_string(), + "tags".to_string(), + "textEdit".to_string(), ], }), insert_replace_support: Some(true), diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 6a3788c8793161..95ca84236001ce 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1615,10 +1615,6 @@ impl LspStore { let (server_id, completion) = { let completions_guard = completions.read(); let completion = &completions_guard[completion_index]; - if completion.documentation.is_some() { - continue; - } - did_resolve = true; let server_id = completion.server_id; let 
completion = completion.lsp_completion.clone(); @@ -1643,10 +1639,6 @@ impl LspStore { let (server_id, completion) = { let completions_guard = completions.read(); let completion = &completions_guard[completion_index]; - if completion.documentation.is_some() { - continue; - } - let server_id = completion.server_id; let completion = completion.lsp_completion.clone(); @@ -1743,6 +1735,10 @@ impl LspStore { completion.lsp_completion.insert_text_format = completion_item.insert_text_format; } } + + let mut completions = completions.write(); + let completion = &mut completions[completion_index]; + completion.lsp_completion = completion_item; } #[allow(clippy::too_many_arguments)] @@ -1771,6 +1767,10 @@ impl LspStore { else { return; }; + let Some(lsp_completion) = serde_json::from_slice(&response.lsp_completion).log_err() + else { + return; + }; let documentation = if response.documentation.is_empty() { Documentation::Undocumented @@ -1787,6 +1787,7 @@ impl LspStore { let mut completions = completions.write(); let completion = &mut completions[completion_index]; completion.documentation = Some(documentation); + completion.lsp_completion = lsp_completion; let old_range = response .old_start @@ -4192,17 +4193,32 @@ impl LspStore { let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?; let completion = this - .read_with(&cx, |this, _| { + .read_with(&cx, |this, cx| { let id = LanguageServerId(envelope.payload.language_server_id as usize); let Some(server) = this.language_server_for_id(id) else { return Err(anyhow!("No language server {id}")); }; - Ok(server.request::(lsp_completion)) + Ok(cx.background_executor().spawn(async move { + let can_resolve = server + .capabilities() + .completion_provider + .as_ref() + .and_then(|options| options.resolve_provider) + .unwrap_or(false); + if can_resolve { + server + .request::(lsp_completion) + .await + } else { + anyhow::Ok(lsp_completion) + } + })) })?? 
.await?; let mut documentation_is_markdown = false; + let lsp_completion = serde_json::to_string(&completion)?.into_bytes(); let documentation = match completion.documentation { Some(lsp::Documentation::String(text)) => text, @@ -4244,6 +4260,7 @@ impl LspStore { old_start, old_end, new_text, + lsp_completion, }) } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index a886b2185556f3..a18bbe8ecf5141 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -1219,6 +1219,7 @@ message ResolveCompletionDocumentationResponse { Anchor old_start = 3; Anchor old_end = 4; string new_text = 5; + bytes lsp_completion = 6; } message ResolveInlayHint { From 8a36278c9590664e881dda454ccfa7685eb5b761 Mon Sep 17 00:00:00 2001 From: moshyfawn Date: Mon, 23 Sep 2024 08:59:45 -0400 Subject: [PATCH 267/762] docs: Fix long code blocks overflow (#18208) Closes #18207 Release Notes: - N/A | Before | After | |--------|-------| | image | image | --- docs/theme/highlight.css | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/theme/highlight.css b/docs/theme/highlight.css index 9d8f39d9030b23..9bd80f351612a8 100644 --- a/docs/theme/highlight.css +++ b/docs/theme/highlight.css @@ -12,6 +12,7 @@ .hljs { color: #24292e; background: #ffffff; + overflow-x: auto; } .hljs-doctag, From d784e720274b2a9ced94aa6fcc703f53db132163 Mon Sep 17 00:00:00 2001 From: Charlie Egan Date: Mon, 23 Sep 2024 14:38:54 +0100 Subject: [PATCH 268/762] docs: Add Rego language (#18217) Release Notes: - N/A --------- Signed-off-by: Charlie Egan Co-authored-by: Charlie Egan Co-authored-by: Marshall Bowers --- docs/src/SUMMARY.md | 1 + docs/src/languages.md | 1 + docs/src/languages/rego.md | 38 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 40 insertions(+) create mode 100644 docs/src/languages/rego.md diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 8bb8035c617437..bb0c9d79f590c5 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -93,6 +93,7 
@@ - [PureScript](./languages/purescript.md) - [Python](./languages/python.md) - [R](./languages/r.md) +- [Rego](./languages/rego.md) - [ReStructuredText](./languages/rst.md) - [Racket](./languages/racket.md) - [Roc](./languages/roc.md) diff --git a/docs/src/languages.md b/docs/src/languages.md index 7ec586f1f598f7..4bc6e7d3d732a5 100644 --- a/docs/src/languages.md +++ b/docs/src/languages.md @@ -45,6 +45,7 @@ Zed supports hundreds of programming languages and text formats. Some work out-o - [PureScript](./languages/purescript.md) - [Python](./languages/python.md) - [R](./languages/r.md) +- [Rego](./languages/rego.md) - [ReStructuredText](./languages/rst.md) - [Racket](./languages/racket.md) - [Roc](./languages/roc.md) diff --git a/docs/src/languages/rego.md b/docs/src/languages/rego.md new file mode 100644 index 00000000000000..3709c6a1feeb2e --- /dev/null +++ b/docs/src/languages/rego.md @@ -0,0 +1,38 @@ +# Rego + +Rego language support in Zed is provided by the community-maintained [Rego extension](https://github.com/StyraInc/zed-rego). + +- Tree Sitter: [FallenAngel97/tree-sitter-rego](https://github.com/FallenAngel97/tree-sitter-rego) +- Language Server: [StyraInc/regal](https://github.com/StyraInc/regal) + +## Installation + +The extensions is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information. + +## Configuration + +The extension's behavior is configured in the `.regal/config.yaml` file. 
The following is an example configuration which disables the `todo-comment` rule, customizes the `line-length` rule, and ignores test files for the `opa-fmt` rule: + +```yaml +rules: + style: + todo-comment: + # don't report on todo comments + level: ignore + line-length: + # custom rule configuration + max-line-length: 100 + # warn on too long lines, but don't fail + level: warning + opa-fmt: + # not needed as error is the default, but + # being explicit won't hurt + level: error + # files can be ignored for any individual rule + # in this example, test files are ignored + ignore: + files: + - "*_test.rego" +``` + +Read Regal's [configuration documentation](https://docs.styra.com/regal#configuration) for more information. From 2ff8dde925b75d62f030755843cd93c402a41022 Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Mon, 23 Sep 2024 10:16:15 -0400 Subject: [PATCH 269/762] Use fenix toolchain in nix shell (#18227) In #17974 we explicitly depend on rustc/cargo for the nix devShell, however the fenix overlay that contains the latest stable versions was not being applied to that shell. This led to the shell inheriting whatever rustc/cargo was on nixos-unstable from nixpkgs, which sometimes lags behind. This change fixes that, and also restructures the flake to ensure that all outputs rely on the overlaid `pkgs`. 
Release Notes: - N/A --- flake.lock | 18 ++++++++--------- flake.nix | 56 +++++++++++++++++++++++++++------------------------ nix/shell.nix | 3 +-- 3 files changed, 40 insertions(+), 37 deletions(-) diff --git a/flake.lock b/flake.lock index a5b7a7a6ae9c46..5666e73569f7dc 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "crane": { "locked": { - "lastModified": 1725409566, - "narHash": "sha256-PrtLmqhM6UtJP7v7IGyzjBFhbG4eOAHT6LPYOFmYfbk=", + "lastModified": 1727060013, + "narHash": "sha256-/fC5YlJy4IoAW9GhkJiwyzk0K/gQd9Qi4rRcoweyG9E=", "owner": "ipetkov", "repo": "crane", - "rev": "7e4586bad4e3f8f97a9271def747cf58c4b68f3c", + "rev": "6b40cc876c929bfe1e3a24bf538ce3b5622646ba", "type": "github" }, "original": { @@ -23,11 +23,11 @@ "rust-analyzer-src": "rust-analyzer-src" }, "locked": { - "lastModified": 1726813972, - "narHash": "sha256-t6turZgoSAVgj7hn5mxzNlLOeVeZvymFo8+ymB52q34=", + "lastModified": 1727073227, + "narHash": "sha256-1kmkEQmFfGVuPBasqSZrNThqyMDV1SzTalQdRZxtDRs=", "owner": "nix-community", "repo": "fenix", - "rev": "251caeafc75b710282ee7e375800f75f4c8c5727", + "rev": "88cc292eb3c689073c784d6aecc0edbd47e12881", "type": "github" }, "original": { @@ -53,11 +53,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1726642912, - "narHash": "sha256-wiZzKGHRAhItEuoE599Wm3ic+Lg/NykuBvhb+awf7N8=", + "lastModified": 1726937504, + "narHash": "sha256-bvGoiQBvponpZh8ClUcmJ6QnsNKw0EMrCQJARK3bI1c=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "395c52d142ec1df377acd67db6d4a22950b02a98", + "rev": "9357f4f23713673f310988025d9dc261c20e70c6", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 7d1410ac7c20b7..2ee86c446685fc 100644 --- a/flake.nix +++ b/flake.nix @@ -17,27 +17,34 @@ fenix, ... 
}: let - forAllSystems = function: - nixpkgs.lib.genAttrs [ - "x86_64-linux" - "aarch64-linux" - ] (system: - function (import nixpkgs { - inherit system; - overlays = [fenix.overlays.default]; - })); - in { - packages = forAllSystems (pkgs: let - craneLib = (crane.mkLib pkgs).overrideToolchain (p: p.fenix.stable.toolchain); - rustPlatform = pkgs.makeRustPlatform { - inherit (pkgs.fenix.stable.toolchain) cargo rustc; + systems = ["x86_64-linux" "aarch64-linux"]; + + overlays = { + fenix = fenix.overlays.default; + rust-toolchain = final: prev: { + rustToolchain = final.fenix.stable.toolchain; }; - nightlyBuild = pkgs.callPackage ./nix/build.nix { - inherit craneLib rustPlatform; + zed-editor = final: prev: { + zed-editor = final.callPackage ./nix/build.nix { + craneLib = (crane.mkLib final).overrideToolchain final.rustToolchain; + rustPlatform = final.makeRustPlatform { + inherit (final.rustToolchain) cargo rustc; + }; + }; + }; + }; + + mkPkgs = system: + import nixpkgs { + inherit system; + overlays = builtins.attrValues overlays; }; - in { - zed-editor = nightlyBuild; - default = nightlyBuild; + + forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f (mkPkgs system)); + in { + packages = forAllSystems (pkgs: { + zed-editor = pkgs.zed-editor; + default = pkgs.zed-editor; }); devShells = forAllSystems (pkgs: { @@ -46,13 +53,10 @@ formatter = forAllSystems (pkgs: pkgs.alejandra); - overlays.default = final: prev: { - zed-editor = final.callPackage ./nix/build.nix { - craneLib = (crane.mkLib final).overrideToolchain (p: p.fenix.stable.toolchain); - rustPlatform = final.makeRustPlatform { - inherit (final.fenix.stable.toolchain) cargo rustc; - }; + overlays = + overlays + // { + default = nixpkgs.lib.composeManyExtensions (builtins.attrValues overlays); }; - }; }; } diff --git a/nix/shell.nix b/nix/shell.nix index 476374b67ef091..e0b4018778c87d 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -20,8 +20,7 @@ in wayland xorg.libxcb vulkan-loader - rustc - cargo + 
rustToolchain ]; in pkgs.mkShell.override {inherit stdenv;} { From 35a80f07e02054b281a946ead549d24499dcfcec Mon Sep 17 00:00:00 2001 From: Nathan Lovato <12694995+NathanLovato@users.noreply.github.com> Date: Mon, 23 Sep 2024 17:01:32 +0200 Subject: [PATCH 270/762] docs: Split vim mode documentation into two pages, edit for clarity (#17614) Closes #17215 Release Notes: - N/A --- This PR builds upon the vim mode documentation page and aims bring the following improvements: - Separate vim mode-specific configuration from introducing vim mode. - Reformat some lists of provided commands and keymaps from code blocks to sub-sections containing tables. - Flesh out the text a little bit to make it more explicit in some parts. - Generally format notes and a couple of other things closer to some other docs pages. Checking the diff doesn't give a good idea of the changes, so here are some before after images for quick examples of the kinds of changes brought by this PR. **Introducing the key differences of Zed's vim mode** Before ![2024-09-09_22-12](https://github.com/user-attachments/assets/447418cb-a6e6-4f9c-8d4b-6d941126979e) After ![2024-09-09_22-16](https://github.com/user-attachments/assets/be69f2d9-c3ae-4b34-978a-344130bee37c) --- **Zed-specific vim key bindings** Before ![2024-09-09_22-17](https://github.com/user-attachments/assets/88fdc512-a50b-487d-85d1-5988f15c2a6f) After ![2024-09-09_22-18](https://github.com/user-attachments/assets/3b77c2f6-0ffa-4afc-a86d-1210ac706c8c) --- docs/src/SUMMARY.md | 2 +- docs/src/vim.md | 607 ++++++++++++++++++++++++++------------------ 2 files changed, 366 insertions(+), 243 deletions(-) diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index bb0c9d79f590c5..f0e4784f89cd92 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -17,7 +17,7 @@ - [Snippets](./snippets.md) - [Themes](./themes.md) -- [Vim](./vim.md) +- [Vim Mode](./vim.md) # Using Zed diff --git a/docs/src/vim.md b/docs/src/vim.md index 
777534813f9657..8bfa6aa73f6121 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -1,14 +1,35 @@ # Vim Mode -Zed includes a Vim emulation layer known as "vim mode". On this page, you will learn how to turn Zed's vim mode on or off, what tools and commands are available, and how to customize keybindings. +Zed includes a Vim emulation layer known as "vim mode". On this page, you will learn how to turn Zed's vim mode on or off, what tools and commands Zed provides to help you navigate and edit your code, and generally how to make the most of vim mode in Zed. -## Philosophy +You'll learn how to: -Vim mode tries to offer a familiar experience to Vim users: it replicates the behavior of motions and commands precisely when it makes sense and uses Zed-specific functionality to provide an editing experience that "just works" without requiring configuration on your part. This includes support for semantic navigation, multiple cursors, or other features usually provided by plugins like surrounding text. +- Understand the core differences between Zed's vim mode and traditional Vim +- Enable or disable vim mode +- Make the most of Zed-specific features within vim mode +- Customize vim mode key bindings +- Configure vim mode settings + +Whether you're new to vim mode or an experienced Vim user looking to optimize your Zed experience, this guide will help you harness the full power of modal editing in Zed. + +## Zed's vim mode design + +Vim mode tries to offer a familiar experience to Vim users: it replicates the behavior of motions and commands precisely when it makes sense and uses Zed-specific functionality to provide an editing experience that "just works" without requiring configuration on your part. + +This includes support for semantic navigation, multiple cursors, or other features usually provided by plugins like surrounding text. 
So, Zed's vim mode does not replicate Vim one-to-one, but it meshes Vim's modal design with Zed's modern features to provide a more fluid experience. It's also configurable, so you can add your own key bindings or override the defaults. -> **Note:** The foundations of Zed's vim mode should already cover many use cases, and we're always looking to improve it. If you find missing features that you rely on in your workflow, please [file an issue](https://github.com/zed-industries/zed/issues). +### Core differences + +There are four types of features in vim mode that use Zed's core functionality, leading to some differences in behavior: + +1. **Motions**: vim mode uses Zed's semantic parsing to tune the behavior of motions per language. For example, in Rust, jumping to matching bracket with `%` works with the pipe character `|`. In JavaScript, `w` considers `$` to be a word character. +2. **Visual block selections**: vim mode uses Zed's multiple cursors to emulate visual block selections, making block selections a lot more flexible. For example, anything you insert after a block selection updates on every line in real-time, and you can add or remove cursors anytime. +3. **Macros**: vim mode uses Zed's recording system for vim macros. So, you can capture and replay more complex actions, like autocompletion. +4. **Search and replace**: vim mode uses Zed's search system, so the syntax for regular expressions is slightly different compared to Vim. [Head to the Regex differences section](#regex-differences) for details. +> **Note:** The foundations of Zed's vim mode should already cover many use cases, and we're always looking to improve it. If you find missing features that you rely on in your workflow, please [file an issue on GitHub](https://github.com/zed-industries/zed/issues).
## Enabling and disabling vim mode @@ -16,136 +37,351 @@ When you first open Zed, you'll see a checkbox on the welcome screen that allows If you missed this, you can toggle vim mode on or off anytime by opening the command palette and using the workspace command `toggle vim mode`. +> **Note**: This command toggles the following property in your user settings: +> +> ```json +> { +> "vim_mode": true +> } +> ``` + ## Zed-specific features Zed is built on a modern foundation that (among other things) uses tree-sitter and language servers to understand the content of the file you're editing and supports multiple cursors out of the box. Vim mode has several "core Zed" key bindings that will help you make the most of Zed's specific feature set. +### Language server + +The following commands use the language server to help you navigate and refactor your code. + +| Command | Default Shortcut | +| ---------------------------------------- | ---------------- | +| Go to definition | `g d` | +| Go to declaration | `g D` | +| Go to type definition | `g y` | +| Go to implementation | `g I` | +| Rename (change definition) | `c d` | +| Go to All references to the current word | `g A` | +| Find symbol in current file | `g s` | +| Find symbol in entire project | `g S` | +| Go to next diagnostic | `g ]` or `] d` | +| Go to previous diagnostic | `g [` or `[ d` | +| Show inline error (hover) | `g h` | +| Open the code actions menu | `g .` | + +### Git + +| Command | Default Shortcut | +| ------------------------- | ---------------- | +| Go to next git change | `] c` | +| Go to previous git change | `[ c` | + +### Treesitter + +Treesitter is a powerful tool that Zed uses to understand the structure of your code. These commands help you navigate your code semantically. 
+ +| Command | Default Shortcut | +| ---------------------------- | ---------------- | +| Select a smaller syntax node | `] x` | +| Select a larger syntax node | `[ x` | + +### Multi cursor + +These commands help you manage multiple cursors in Zed. + +| Command | Default Shortcut | +| ------------------------------------------------------------ | ---------------- | +| Add a cursor selecting the next copy of the current word | `g l` | +| Add a cursor selecting the previous copy of the current word | `g L` | +| Skip latest word selection, and add next | `g >` | +| Skip latest word selection, and add previous | `g <` | +| Add a visual selection for every copy of the current word | `g a` | + +### Pane management + +These commands open new panes or jump to specific panes. + +| Command | Default Shortcut | +| ------------------------------------------ | ------------------ | +| Open a project-wide search | `g /` | +| Open the current search excerpt | `g ` | +| Open the current search excerpt in a split | ` ` | +| Go to definition in a split | ` g d` | +| Go to type definition in a split | ` g D` | + +### In insert mode + +The following commands help you bring up Zed's completion menu, request a suggestion from GitHub Copilot, or open the inline AI assistant without leaving insert mode. 
+ +| Command | Default Shortcut | +| ---------------------------------------------------------------------------- | ---------------- | +| Open the completion menu | `ctrl-x ctrl-o` | +| Request GitHub Copilot suggestion (requires GitHub Copilot to be configured) | `ctrl-x ctrl-c` | +| Open the inline AI assistant (requires a configured assistant) | `ctrl-x ctrl-a` | +| Open the code actions menu | `ctrl-x ctrl-l` | +| Hides all suggestions | `ctrl-x ctrl-z` | + +### Supported plugins + +Zed's vim mode includes some features that are usually provided by very popular plugins in the Vim ecosystem: + +- You can surround text objects with `ys` (yank surround), change surrounding with `cs`, and delete surrounding with `ds`. +- You can comment and uncomment selections with `gc` in visual mode and `gcc` in normal mode. +- The project panel supports many shortcuts modeled after the Vim plugin `netrw`: navigation with `hjkl`, open file with `o`, open file in a new tab with `t`, etc. +- You can add key bindings to your keymap to navigate "camelCase" names. [Head down to the Optional key bindings](#optional-key-bindings) section to learn how. + +## Command palette + +Vim mode allows you to open Zed's command palette with `:`. You can then type to access any usual Zed command. Additionally, vim mode adds aliases for popular Vim commands to ensure your muscle memory transfers to Zed. For example, you can write `:w` or `:write` to save the file. + +Below, you'll find tables listing the commands you can use in the command palette. We put optional characters in square brackets to indicate that you can omit them. + +> **Note**: We don't emulate the full power of Vim's command line yet. In particular, commands currently do not support arguments. Please [file issues on GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. + +### File and window management + +This table shows commands for managing windows, tabs, and panes. 
As commands don't support arguments currently, you cannot specify a filename when saving or creating a new file. + +| Command | Description | +| -------------- | ---------------------------------------------------- | +| `:w[rite][!]` | Save the current file | +| `:wq[!]` | Save the file and close the buffer | +| `:q[uit][!]` | Close the buffer | +| `:wa[ll][!]` | Save all open files | +| `:wqa[ll][!]` | Save all open files and close all buffers | +| `:qa[ll][!]` | Close all buffers | +| `:[e]x[it][!]` | Close the buffer | +| `:up[date]` | Save the current file | +| `:cq` | Quit completely (close all running instances of Zed) | +| `:vs[plit]` | Split the pane vertically | +| `:sp[lit]` | Split the pane horizontally | +| `:new` | Create a new file in a horizontal split | +| `:vne[w]` | Create a new file in a vertical split | +| `:tabedit` | Create a new file in a new tab | +| `:tabnew` | Create a new file in a new tab | +| `:tabn[ext]` | Go to the next tab | +| `:tabp[rev]` | Go to previous tab | +| `:tabc[lose]` | Close the current tab | + +> **Note:** The `!` character is used to force the command to execute without saving changes or prompting before overwriting a file. + +### Ex commands + +These ex commands open Zed's various panels and windows. + +| Command | Default Shortcut | +| ---------------------------- | ---------------- | +| Open the project panel | `:E[xplore]` | +| Open the collaboration panel | `:C[ollab]` | +| Open the chat panel | `:Ch[at]` | +| Open the AI panel | `:A[I]` | +| Open the notifications panel | `:No[tif]` | +| Open the feedback window | `:fe[edback]` | +| Open the diagnostics window | `:cl[ist]` | +| Open the terminal | `:te[rm]` | +| Open the extensions window | `:Ext[ensions]` | + +### Navigating diagnostics + +These commands navigate diagnostics. 
+ +| Command | Description | +| ------------------------ | ------------------------------ | +| `:cn[ext]` or `:ln[ext]` | Go to the next diagnostic | +| `:cp[rev]` or `:lp[rev]` | Go to the previous diagnostics | +| `:cc` or `:ll` | Open the errors page | + +### Git + +These commands interact with the version control system git. + +| Command | Description | +| --------------- | ------------------------------------------------------- | +| `:dif[fupdate]` | View the diff under the cursor (`d o` in normal mode) | +| `:rev[ert]` | Revert the diff under the cursor (`d p` in normal mode) | + +### Jump + +These commands jump to specific positions in the file. + +| Command | Description | +| ------------------- | ----------------------------------- | +| `:` | Jump to a line number | +| `:$` | Jump to the end of the file | +| `:/foo` and `:?foo` | Jump to next/prev line matching foo | + +### Replacement + +This command replaces text. It emulates the substitute command in vim. The substitute command uses regular expressions, and Zed uses a slightly different syntax than vim. You can learn more about Zed's syntax below, [in the regex differences section](#regex-differences). Also, by default, Zed always replaces all occurrences of the search pattern in the current line. + +| Command | Description | +| -------------------- | --------------------------------- | +| `:[range]s/foo/bar/` | Replace instances of foo with bar | + +### Editing + +These commands help you edit text. + +| Command | Description | +| ----------------- | ------------------------------------------------------- | +| `:j[oin]` | Join the current line | +| `:d[elete][l][p]` | Delete the current line | +| `:s[ort] [i]` | Sort the current selection (with i, case-insensitively) | +| `:y[ank]` | Yank (copy) the current selection or line | + +### Command mnemonics + +As any Zed command is available, you may find that it's helpful to remember mnemonics that run the correct command. 
For example: + +- `:diffs` for "toggle all hunk diffs" +- `:cpp` for "copy path to file" +- `:crp` for "copy relative path" +- `:reveal` for "reveal in finder" +- `:zlog` for "open zed log" +- `:clank` for "cancel language server work" + +## Customizing key bindings + +In this section, we'll learn how to customize the key bindings of Zed's vim mode. You'll learn: + +- How to select the correct context for your new key bindings. +- Useful contexts for vim mode key bindings. +- Common key bindings to customize for extra productivity. + +### Selecting the correct context + +Zed's key bindings are evaluated only when the `"context"` property matches your location in the editor. For example, if you add key bindings to the `"Editor"` context, they will only work when you're editing a file. If you add key bindings to the `"Workspace"` context, they will work everywhere in Zed. Here's an example of a key binding that saves when you're editing a file: + +```json +{ + "context": "Editor", + "bindings": { + "ctrl-s": "file::Save" + } +} ``` -# Language server -g d Go to definition -g D Go to declaration -g y Go to type definition -g I Go to implementation - -c d Rename (change definition) -g A Go to All references to the current word - -g s Find symbol in current file -g S Find symbol in entire project - -g ] Go to next diagnostic -g [ Go to previous diagnostic -] d Go to next diagnostic -[ d Go to previous diagnostic -g h Show inline error (hover) -g . Open the code actions menu - -# Git -] c Go to next git change -[ c Go to previous git change - -# Treesitter -] x Select a smaller syntax node -[ x Select a larger syntax node - -# Multi cursor -g l Add a visual selection for the next copy of the current word -g L The same, but backwards -g > Skip latest word selection, and add next. 
-g < The same, but backwards -g a Add a visual selection for every copy of the current word - -# Pane management -g / Open a project-wide search -g Open the current search excerpt - Open the current search excerpt in a split - g d Go to definition in a split - g D Go to type definition in a split - -# Insert mode -ctrl-x ctrl-o Open the completion menu -ctrl-x ctrl-c Request GitHub Copilot suggestion (if configured) -ctrl-x ctrl-a Open the inline AI assistant (if configured) -ctrl-x ctrl-l Open the code actions menu -ctrl-x ctrl-z Hides all suggestions - -# Ex commands -:E[xplore] Open the project panel -:C[ollab] Open the collaboration panel -:Ch[at] Open the chat panel -:A[I] Open the AI panel -:No[tif] Open the notifications panel -:fe[edback] Open the feedback window -:cl[ist] Open the diagnostics window -:te[rm] Open the terminal -:Ext[ensions] Open the extensions window + +Contexts are nested, so when you're editing a file, the context is the `"Editor"` context, which is inside the `"Pane"` context, which is inside the `"Workspace"` context. That's why any key bindings you add to the `"Workspace"` context will work when you're editing a file. Here's an example: + +```json +// This key binding will work when you're editing a file. It comes built into Zed by default as the workspace: save command. +{ + "context": "Workspace", + "bindings": { + "ctrl-s": "file::Save" + } +} ``` -Vim mode uses Zed to define concepts like "brackets" (for the `%` key) and "words" (for motions like `w` and `e`). This does lead to some differences, but they are mostly positive. For example `%` considers `|` to be a bracket in languages like Rust; and `w` considers `$` to be a word-character in languages like JavaScript. +Contexts are expressions. They support boolean operators like `&&` (and) and `||` (or). For example, you can use the context `"Editor && vim_mode == normal"` to create key bindings that only work when you're editing a file _and_ you're in vim's normal mode. 
-Vim mode emulates visual block mode using Zed's multiple cursor support. This again leads to some differences, but is much more powerful. +Vim mode adds several contexts to the `"Editor"` context: -Vim's macro support (`q` and `@`) is implemented using Zed's actions. This lets us support recording and replaying of autocompleted code, etc. Unlike Vim, Zed does not re-use the yank registers for recording macros, they are two separate namespaces. +| Operator | Description | +| -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| VimControl | Indicates that vim keybindings should work. Currently an alias for `vim_mode == normal \|\| vim_mode == visual \|\| vim_mode == operator`, but the definition may change over time | +| vim_mode == normal | Normal mode | +| vim_mode == visual | Visual mode | +| vim_mode == insert | Insert mode | +| vim_mode == replace | Replace mode | +| vim_mode == waiting | Waiting for an arbitrary key (e.g., after typing `f` or `t`) | +| vim_mode == operator | Waiting for another binding to trigger (e.g., after typing `c` or `d`) | +| vim_operator | Set to `none` unless `vim_mode == operator`, in which case it is set to the current operator's default keybinding (e.g., after typing `d`, `vim_operator == d`) | -Finally, vim mode's search and replace functionality is backed by Zed's. This means that the pattern syntax is slightly different, see the section on [Regex differences](#regex-differences) for details. +> **Note**: Contexts are matched only on one level at a time. So it is possible to use the expression `"Editor && vim_mode == normal"`, but `"Workspace && vim_mode == normal"` will never match because we set the vim context at the `"Editor"` level. -## Custom key bindings +### Useful contexts for vim mode key bindings -You can edit your personal key bindings with `:keymap`. 
-For vim-specific shortcuts, you may find the following template a good place to start. +Here's a template with useful vim mode contexts to help you customize your vim mode key bindings. You can copy it and integrate it into your user keymap. ```json [ { "context": "VimControl && !menu", "bindings": { - // put key-bindings here if you want them to work in normal & visual mode + // Put key bindings here if you want them to work in normal & visual mode. } }, { "context": "vim_mode == normal && !menu", "bindings": { - // "shift-y": ["workspace::SendKeystrokes", "y $"] // use nvim's Y behavior + // "shift-y": ["workspace::SendKeystrokes", "y $"] // Use neovim's yank behavior: yank to end of line. } }, { "context": "vim_mode == insert", "bindings": { - // "j k": "vim::NormalBefore" // remap jk in insert mode to escape. + // "j k": "vim::NormalBefore" // In insert mode, make jk escape to normal mode. } }, { "context": "EmptyPane || SharedScreen", "bindings": { - // put key-bindings here (in addition to above) if you want them to - // work when no editor exists + // Put key bindings here (in addition to the context above) if you want them to + // work when no editor exists. // "space f": "file_finder::Toggle" } } ] ``` -If you would like to emulate vim's `map` (`nmap` etc.) commands you can bind to the [`workspace::SendKeystrokes`](./key-bindings.md#remapping-keys) action in the correct context. +> **Note**: If you would like to emulate Vim's `map` commands (`nmap`, etc.), you can use the action `workspace::SendKeystrokes` in the correct context. -Check out the [bindings that are enabled by default in vim mode](https://github.com/zed-industries/zed/blob/main/assets/keymaps/vim.json). +### Optional key bindings -### Contexts +By default, you can navigate between the different files open in the editor with shortcuts like `ctrl+w` followed by one of `hjkl` to move to the left, down, up, or right, respectively. 
-Zed's keyboard bindings are evaluated only when the `"context"` matches the location you are in on the screen. Locations are nested, so when you're editing, you're in the `"Workspace"` location, which is at the top, containing a `"Pane"` that contains an `"Editor"`. +But you cannot use the same shortcuts to move between all the editor docks (the terminal, project panel, assistant panel, ...). If you want to use the same shortcuts to navigate to the docks, you can add the following key bindings to your user keymap. -Contexts are matched only on one level at a time. So, it is possible to combine `Editor && vim_mode == normal`, but `Workspace && vim_mode == normal` will never match because we set the vim context at the `Editor` level. +```json +{ + "context": "Dock", + "bindings": { + "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], + "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], + "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], + "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"] + // ... or other keybindings + } +} +``` -Vim mode adds several contexts to the `Editor`: +Subword motion, which allows you to navigate and select individual words in camelCase or snake_case, is not enabled by default. To enable it, add these bindings to your keymap. -- `vim_mode` is similar to, but not identical to, the current mode. It starts as one of `normal`, `visual`, `insert` or `replace` (depending on your mode). If you are mid-way through typing a sequence, `vim_mode` will be either `waiting` if it's waiting for an arbitrary key (for example after typing `f` or `t`), or `operator` if it's waiting for another binding to trigger (for example after typing `c` or `d`). -- `vim_operator` is set to `none` unless `vim_mode == operator` in which case it is set to the current operator's default keybinding (for example after typing `d`, `vim_operator == d`). -- `"VimControl"` indicates that vim keybindings should work. 
It is currently an alias for `vim_mode == normal || vim_mode == visual || vim_mode == operator`, but the definition may change over time. +```json +[ + { + "context": "VimControl && !menu && vim_mode != operator", + "bindings": { + "w": "vim::NextSubwordStart", + "b": "vim::PreviousSubwordStart", + "e": "vim::NextSubwordEnd", + "g e": "vim::PreviousSubwordEnd" + } + } +] +``` + +Vim mode comes with shortcuts to surround the selection in normal mode (`ys`), but it doesn't have a shortcut to add surrounds in visual mode. By default, `shift-s` substitutes the selection (erases the text and enters insert mode). To use `shift-s` to add surrounds in visual mode, you can add the following object to your keymap. + +```json +{ + "context": "vim_mode == visual", + "bindings": { + "shift-s": [ + "vim::PushOperator", + { + "AddSurrounds": {} + } + ] + } +} +``` ### Restoring common text editing keybindings -If you're using vim mode on Linux or Windows, you may find it overrides keybindings you can't live without: Ctrl+v to copy, Ctrl+f to search, etc. You can restore them by copying this data into your keymap: +If you're using vim mode on Linux or Windows, you may find it overrides keybindings you can't live without: `ctrl+v` to copy, `ctrl+f` to search, etc. You can restore them by copying this data into your keymap: ```json { @@ -162,109 +398,39 @@ If you're using vim mode on Linux or Windows, you may find it overrides keybindi }, ``` -## Command palette - -Vim mode allows you to enable Zed’s command palette with `:`. This means that you can use vim's command palette to run any action that Zed supports. - -Additionally, vim mode contains a number of aliases for popular Vim commands to ensure that muscle memory works. For example, `:w` will save the file. - -We do not (yet) emulate the full power of Vim’s command line, in particular, we do not support arguments to commands yet. 
Please [file issues on GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. - -As mentioned above, one thing to be aware of is that the regex engine is slightly different from vim's in `:%s/a/b`. - -Currently supported Vim-specific commands: +## Changing vim mode settings -``` -# window management -:w[rite][!], :wq[!], :q[uit][!], :wa[ll][!], :wqa[ll][!], :qa[ll][!], :[e]x[it][!], :up[date] - to save/close tab(s) and pane(s) (no filename is supported yet) -:cq - to quit completely. -:vs[plit], :sp[lit] - to split vertically/horizontally (no filename is supported yet) -:new, :vne[w] - to create a new file in a new pane above or to the left -:tabedit, :tabnew - to create a new file in a new tab. -:tabn[ext], :tabp[rev] - to go to previous/next tabs -:tabc[lose] - to close the current tab - -# navigating diagnostics -:cn[ext], :cp[rev], :ln[ext], :lp[rev] - to go to the next/prev diagnostics -:cc, :ll - to open the errors page - -# handling git diff -:dif[fupdate] - to view the diff under the cursor ("d o" in normal mode) -:rev[ert] - to revert the diff under the cursor ("d p" in normal mode) - -# jump to position -: - to jump to a line number -:$ - to jump to the end of the file -:/foo and :?foo - to jump to next/prev line matching foo - -# replacement (/g is always assumed and Zed uses different regex syntax to vim) -:[range]s/foo/bar/ - to replace instances of foo with bar - -# editing -:j[oin] - to join the current line (no range is yet supported) -:d[elete][l][p] - to delete the current line (no range is yet supported) -:s[ort] [i] - to sort the current selection (with i, case-insensitively) -:y[ank] -``` - -As any Zed command is available, you may find that it's helpful to remember mnemonics that run the correct command. 
For example: +You can change the following settings to modify vim mode's behavior: -``` -:diffs Toggle all Hunk [Diffs] -:cpp [C]o[p]y [P]ath to file -:crp [C]opy [r]elative [P]ath -:reveal [Reveal] in finder -:zlog Open [Z]ed Log -:clank [C]ancel [lan]guage server work[k] -``` +| Property | Description | Default Value | +| ---------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- | +| use_system_clipboard | Determines how system clipboard is used:
  • "always": use for all operations
  • "never": only use when explicitly specified
  • "on_yank": use for yank operations
| "always" | +| use_multiline_find | If `true`, `f` and `t` motions extend across multiple lines. | false | +| use_smartcase_find | If `true`, `f` and `t` motions are case-insensitive when the target letter is lowercase. | false | +| toggle_relative_line_numbers | If `true`, line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options. | false | +| custom_digraphs | An object that allows you to add custom digraphs. Read below for an example. | {} | -## Settings - -Vim mode is not enabled by default. To enable vim mode, you need to add the following configuration to your settings file: +Here's an example of adding a digraph for the zombie emoji. This allows you to type `ctrl-k f z` to insert a zombie emoji. You can add as many digraphs as you like. ```json { - "vim_mode": true + "vim": { + "custom_digraphs": { + "fz": "🧟‍♀️" + } + } } ``` -Alternatively, you can enable vim mode by running the `toggle vim mode` command from the command palette. - -Some vim settings are available to modify the default vim behavior: +Here's an example of these settings changed: ```json { "vim": { - // "always": use system clipboard when no register is specified - // "never": don't use system clipboard unless "+ or "* is specified - // "on_yank": use system clipboard for yank operations when no register is specified - "use_system_clipboard": "always", - // Let `f` and `t` motions extend across multiple lines + "use_system_clipboard": "never", "use_multiline_find": true, - // Let `f` and `t` motions match case insensitively if the target is lowercase "use_smartcase_find": true, - // Use relative line numbers in normal mode, absolute in insert mode - // c.f. https://github.com/jeffkreeftmeijer/vim-numbertoggle "toggle_relative_line_numbers": true, - // Add custom digraphs (e.g. 
ctrl-k f z will insert a zombie emoji) "custom_digraphs": { "fz": "🧟‍♀️" } @@ -272,22 +438,36 @@ Some vim settings are available to modify the default vim behavior: } ``` -There are also a few Zed settings that you may also enjoy if you use vim mode: +## Useful core Zed settings for vim mode + +Here are a few general Zed settings that can help you fine-tune your Vim experience: + +| Property | Description | Default Value | +| ----------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------- | +| cursor_blink | If `true`, the cursor blinks. | `true` | +| relative_line_numbers | If `true`, line numbers in the left gutter are relative to the cursor. | `true` | +| scrollbar | Object that controls the scrollbar display. Set to `{ "show": "never" }` to hide the scroll bar. | `{ "show": "always" }` | +| scroll_beyond_last_line | If set to `"one_page"`, allows scrolling up to one page beyond the last line. Set to `"off"` to prevent this behavior. | `"one_page"` | +| vertical_scroll_margin | The number of lines to keep above or below the cursor when scrolling. Set to `0` to allow the cursor to go up to the edges of the screen vertically. | `3` | +| gutter.line_numbers | Controls the display of line numbers in the gutter. Set the `"line_numbers"` property to `false` to hide line numbers. | `true` | +| command_aliases | Object that defines aliases for commands in the command palette. You can use it to define shortcut names for commands you use often. Read below for examples. 
| `{}` | + +Here's an example of these settings changed: ```json { - // disable cursor blink + // Disable cursor blink "cursor_blink": false, - // use relative line numbers + // Use relative line numbers "relative_line_numbers": true, - // hide the scroll bar + // Hide the scroll bar "scrollbar": { "show": "never" }, - // prevent the buffer from scrolling beyond the last line + // Prevent the buffer from scrolling beyond the last line "scroll_beyond_last_line": "off", - // allow cursor to reach edges of screen + // Allow the cursor to reach the edges of the screen "vertical_scroll_margin": 0, "gutter": { - // disable line numbers completely: + // Disable line numbers completely: "line_numbers": false }, "command_aliases": { @@ -298,74 +478,17 @@ There are also a few Zed settings that you may also enjoy if you use vim mode: } ``` -If you want to navigate between the editor and docks (terminal, project panel, AI assistant panel, etc...), just like you navigate between splits, you can use the following key bindings: - -```json -{ - "context": "Dock", - "bindings": { - "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], - "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], - "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], - "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"] - // ... or other keybindings - } -} -``` - -Subword motion is not enabled by default. To enable it, add these bindings to your keymap. - -```json -[ - { - "context": "VimControl && !menu && vim_mode != operator", - "bindings": { - "w": "vim::NextSubwordStart", - "b": "vim::PreviousSubwordStart", - "e": "vim::NextSubwordEnd", - "g e": "vim::PreviousSubwordEnd" - } - } -] -``` - -Surrounding the selection in visual mode is also not enabled by default (`shift-s` normally behaves like `c`). To enable it, add the following to your keymap. 
- -```json -{ - "context": "vim_mode == visual", - "bindings": { - "shift-s": [ - "vim::PushOperator", - { - "AddSurrounds": {} - } - ] - } -} -``` - -## Supported plugins - -Zed has nascent support for some Vim plugins: - -- From `vim-surround`, `ys`, `cs` and `ds` work. Though you cannot add new HTML tags yet. -- From `vim-commentary`, `gc` in visual mode and `gcc` in normal mode. Though you cannot operate on arbitrary objects yet. -- From `netrw`, most keybindings are supported in the project panel. -- From `vim-spider`/`CamelCaseMotion` you can use subword motions as described above. +The `command_aliases` property is a single object that maps keys or key sequences to vim mode commands. The example above defines multiple aliases: `W` for `w`, `Wq` for `wq`, and `Q` for `q`. ## Regex differences -Zed uses a different regular expression engine from Vim. This means that you will have to use a different syntax for some things. - -Notably: +Zed uses a different regular expression engine from Vim. This means that you will have to use a different syntax in some cases. Here are the most common differences: -- Vim uses `\(` and `\)` to represent capture groups, in Zed these are `(` and `)`. -- On the flip side, `(` and `)` represent literal parentheses, but in Zed these must be escaped to `\(` and `\)`. -- When replacing, Vim uses `\0` to represent the entire match, in Zed this is `$0`, same for numbered capture groups `\1` -> `$1`. -- Vim uses `/g` to indicate "all matches on one line", in Zed this is implied -- Vim uses `/i` to indicate "case-insensitive", in Zed you can either use `(?i)` at the start of the pattern or toggle case-sensitivity with `cmd-option-c`. +- **Capture groups**: Vim uses `\(` and `\)` to represent capture groups, in Zed these are `(` and `)`. On the flip side, in Vim, `(` and `)` represent literal parentheses, but in Zed these must be escaped to `\(` and `\)`. 
+- **Matches**: When replacing, Vim uses the backslash character followed by a number to represent a matched capture group. For example, `\1`. Zed uses the dollar sign instead. So, when in Vim you use `\0` to represent the entire match, in Zed the syntax is `$0` instead. Same for numbered capture groups: `\1` in Vim is `$1` in Zed. +- **Global option**: By default, in Vim, regex searches only match the first occurrence on a line, and you append `/g` at the end of your query to find all matches. In Zed, regex searches are global by default. +- **Case sensitivity**: Vim uses `/i` to indicate a case-insensitive search. In Zed you can either write `(?i)` at the start of the pattern or toggle case-sensitivity with the shortcut {#kb search::ToggleCaseSensitive}. -To help with the transition, the command palette will fix parentheses and replace groups for you when you run `:%s//`. So `%s:/\(a\)(b)/\1/` will be converted into a search for "(a)\(b\)" and a replacement of "$1". +> **Note**: To help with the transition, the command palette will fix parentheses and replace groups for you when you write a Vim-style substitute command, `:%s//`. So, Zed will convert `%s:/\(a\)(b)/\1/` into a search for "(a)\(b\)" and a replacement of "$1". For the full syntax supported by Zed's regex engine [see the regex crate documentation](https://docs.rs/regex/latest/regex/#syntax). 
From a36706aed6e7f582f731a4f33ef3b056dac25f36 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Sep 2024 09:11:58 -0600 Subject: [PATCH 271/762] Fix up/down project_id confusion (#18099) Release Notes: - ssh remoting: Fix LSP queries run over collab --- crates/project/src/lsp_store.rs | 137 +++++++++++-------- crates/project/src/project.rs | 19 +-- crates/project/src/worktree_store.rs | 103 +++++++++----- crates/remote_server/src/headless_project.rs | 2 +- 4 files changed, 161 insertions(+), 100 deletions(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 95ca84236001ce..4506fcc6feb430 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -50,7 +50,7 @@ use parking_lot::{Mutex, RwLock}; use postage::watch; use rand::prelude::*; -use rpc::AnyProtoClient; +use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient}; use serde::Serialize; use settings::{Settings, SettingsLocation, SettingsStore}; use sha2::{Digest, Sha256}; @@ -132,6 +132,7 @@ impl LocalLspStore { pub struct RemoteLspStore { upstream_client: AnyProtoClient, + upstream_project_id: u64, } impl RemoteLspStore {} @@ -164,8 +165,7 @@ impl LspStoreMode { pub struct LspStore { mode: LspStoreMode, - downstream_client: Option, - project_id: u64, + downstream_client: Option<(AnyProtoClient, u64)>, nonce: u128, buffer_store: Model, worktree_store: Model, @@ -302,14 +302,16 @@ impl LspStore { } } - pub fn upstream_client(&self) -> Option { + pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { match &self.mode { LspStoreMode::Ssh(SshLspStore { upstream_client, .. - }) - | LspStoreMode::Remote(RemoteLspStore { - upstream_client, .. - }) => Some(upstream_client.clone()), + }) => Some((upstream_client.clone(), SSH_PROJECT_ID)), + LspStoreMode::Remote(RemoteLspStore { + upstream_client, + upstream_project_id, + .. 
+ }) => Some((upstream_client.clone(), *upstream_project_id)), LspStoreMode::Local(_) => None, } } @@ -374,7 +376,6 @@ impl LspStore { }), }), downstream_client: None, - project_id: 0, buffer_store, worktree_store, languages: languages.clone(), @@ -395,10 +396,11 @@ impl LspStore { &self, buffer: Model, client: AnyProtoClient, + upstream_project_id: u64, request: R, cx: &mut ModelContext<'_, LspStore>, ) -> Task::Response>> { - let message = request.to_proto(self.project_id, buffer.read(cx)); + let message = request.to_proto(upstream_project_id, buffer.read(cx)); cx.spawn(move |this, cx| async move { let response = client.request(message).await?; let this = this.upgrade().context("project dropped")?; @@ -413,7 +415,6 @@ impl LspStore { worktree_store: Model, languages: Arc, upstream_client: AnyProtoClient, - project_id: u64, cx: &mut ModelContext, ) -> Self { cx.subscribe(&buffer_store, Self::on_buffer_store_event) @@ -429,7 +430,6 @@ impl LspStore { current_lsp_settings: Default::default(), }), downstream_client: None, - project_id, buffer_store, worktree_store, languages: languages.clone(), @@ -461,9 +461,11 @@ impl LspStore { .detach(); Self { - mode: LspStoreMode::Remote(RemoteLspStore { upstream_client }), + mode: LspStoreMode::Remote(RemoteLspStore { + upstream_client, + upstream_project_id: project_id, + }), downstream_client: None, - project_id, buffer_store, worktree_store, languages: languages.clone(), @@ -768,13 +770,13 @@ impl LspStore { } pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) { - if let Some(client) = self.downstream_client.clone() { + if let Some((client, downstream_project_id)) = self.downstream_client.clone() { if let Some(summaries) = self.diagnostic_summaries.get(&worktree.id()) { for (path, summaries) in summaries { for (&server_id, summary) in summaries { client .send(proto::UpdateDiagnosticSummary { - project_id: self.project_id, + project_id: downstream_project_id, worktree_id: worktree.id().to_proto(), 
summary: Some(summary.to_proto(server_id, path)), }) @@ -798,8 +800,14 @@ impl LspStore { { let buffer = buffer_handle.read(cx); - if let Some(upstream_client) = self.upstream_client() { - return self.send_lsp_proto_request(buffer_handle, upstream_client, request, cx); + if let Some((upstream_client, upstream_project_id)) = self.upstream_client() { + return self.send_lsp_proto_request( + buffer_handle, + upstream_client, + upstream_project_id, + request, + cx, + ); } let language_server = match server { @@ -1077,9 +1085,9 @@ impl LspStore { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let request = proto::ApplyCodeAction { - project_id: self.project_id, + project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), action: Some(Self::serialize_code_action(&action)), }; @@ -1163,9 +1171,9 @@ impl LspStore { server_id: LanguageServerId, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let request = proto::ResolveInlayHint { - project_id: self.project_id, + project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), language_server_id: server_id.0 as u64, hint: Some(InlayHints::project_to_proto_hint(hint.clone())), @@ -1274,9 +1282,9 @@ impl LspStore { trigger: String, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = proto::OnTypeFormatting { - project_id: self.project_id, + project_id, buffer_id: buffer.read(cx).remote_id().into(), position: Some(serialize_anchor(&position)), trigger, @@ -1424,11 +1432,11 @@ impl LspStore { range: Range, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = 
self.upstream_client() { let request_task = upstream_client.request(proto::MultiLspQuery { buffer_id: buffer_handle.read(cx).remote_id().into(), version: serialize_version(&buffer_handle.read(cx).version()), - project_id: self.project_id, + project_id, strategy: Some(proto::multi_lsp_query::Strategy::All( proto::AllLanguageServers {}, )), @@ -1437,7 +1445,7 @@ impl LspStore { range: range.clone(), kinds: None, } - .to_proto(self.project_id, buffer_handle.read(cx)), + .to_proto(project_id, buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ -1504,10 +1512,11 @@ impl LspStore { ) -> Task>> { let language_registry = self.languages.clone(); - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let task = self.send_lsp_proto_request( buffer.clone(), upstream_client, + project_id, GetCompletions { position, context }, cx, ); @@ -1603,14 +1612,13 @@ impl LspStore { ) -> Task> { let client = self.upstream_client(); let language_registry = self.languages.clone(); - let project_id = self.project_id; let buffer_id = buffer.read(cx).remote_id(); let buffer_snapshot = buffer.read(cx).snapshot(); cx.spawn(move |this, cx| async move { let mut did_resolve = false; - if let Some(client) = client { + if let Some((client, project_id)) = client { for completion_index in completion_indices { let (server_id, completion) = { let completions_guard = completions.read(); @@ -1811,8 +1819,7 @@ impl LspStore { let buffer = buffer_handle.read(cx); let buffer_id = buffer.remote_id(); - if let Some(client) = self.upstream_client() { - let project_id = self.project_id; + if let Some((client, project_id)) = self.upstream_client() { cx.spawn(move |_, mut cx| async move { let response = client .request(proto::ApplyCompletionAdditionalEdits { @@ -1927,9 +1934,9 @@ impl LspStore { let buffer_id = buffer.remote_id().into(); let lsp_request = InlayHints { range }; - if let Some(client) = 
self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = proto::InlayHints { - project_id: self.project_id, + project_id, buffer_id, start: Some(serialize_anchor(&range_start)), end: Some(serialize_anchor(&range_end)), @@ -1977,16 +1984,16 @@ impl LspStore { ) -> Task> { let position = position.to_point_utf16(buffer.read(cx)); - if let Some(client) = self.upstream_client() { + if let Some((client, upstream_project_id)) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), - project_id: self.project_id, + project_id: upstream_project_id, strategy: Some(proto::multi_lsp_query::Strategy::All( proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetSignatureHelp( - GetSignatureHelp { position }.to_proto(self.project_id, buffer.read(cx)), + GetSignatureHelp { position }.to_proto(upstream_project_id, buffer.read(cx)), )), }); let buffer = buffer.clone(); @@ -2049,16 +2056,16 @@ impl LspStore { position: PointUtf16, cx: &mut ModelContext, ) -> Task> { - if let Some(client) = self.upstream_client() { + if let Some((client, upstream_project_id)) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), - project_id: self.project_id, + project_id: upstream_project_id, strategy: Some(proto::multi_lsp_query::Strategy::All( proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetHover( - GetHover { position }.to_proto(self.project_id, buffer.read(cx)), + GetHover { position }.to_proto(upstream_project_id, buffer.read(cx)), )), }); let buffer = buffer.clone(); @@ -2123,9 +2130,9 @@ impl LspStore { pub fn symbols(&self, query: &str, cx: &mut ModelContext) -> Task>> { let language_registry = self.languages.clone(); - if let 
Some(upstream_client) = self.upstream_client().as_ref() { + if let Some((upstream_client, project_id)) = self.upstream_client().as_ref() { let request = upstream_client.request(proto::GetProjectSymbols { - project_id: self.project_id, + project_id: *project_id, query: query.to_string(), }); cx.foreground_executor().spawn(async move { @@ -2598,8 +2605,7 @@ impl LspStore { downstream_client: AnyProtoClient, _: &mut ModelContext, ) { - self.project_id = project_id; - self.downstream_client = Some(downstream_client.clone()); + self.downstream_client = Some((downstream_client.clone(), project_id)); for (server_id, status) in &self.language_server_statuses { downstream_client @@ -2857,10 +2863,10 @@ impl LspStore { } if !old_summary.is_empty() || !new_summary.is_empty() { - if let Some(downstream_client) = &self.downstream_client { + if let Some((downstream_client, project_id)) = &self.downstream_client { downstream_client .send(proto::UpdateDiagnosticSummary { - project_id: self.project_id, + project_id: *project_id, worktree_id: worktree_id.to_proto(), summary: Some(proto::DiagnosticSummary { path: worktree_path.to_string_lossy().to_string(), @@ -2881,9 +2887,9 @@ impl LspStore { symbol: &Symbol, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = client.request(proto::OpenBufferForSymbol { - project_id: self.project_id, + project_id, symbol: Some(Self::serialize_symbol(symbol)), }); cx.spawn(move |this, mut cx| async move { @@ -3184,6 +3190,17 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { + let response_from_ssh = this.update(&mut cx, |this, _| { + let ssh = this.as_ssh()?; + let mut payload = envelope.payload.clone(); + payload.project_id = SSH_PROJECT_ID; + + Some(ssh.upstream_client.request(payload)) + })?; + if let Some(response_from_ssh) = response_from_ssh { + return response_from_ssh.await; + } + let sender_id = 
envelope.original_sender_id().unwrap_or_default(); let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let version = deserialize_version(&envelope.payload.version); @@ -4779,10 +4796,11 @@ impl LspStore { // TODO: We should use `adapter` here instead of reaching through the `CachedLspAdapter`. let lsp_adapter = adapter.adapter.clone(); - let project_id = self.project_id; + let Some((upstream_client, project_id)) = self.upstream_client() else { + return; + }; let worktree_id = worktree.read(cx).id().to_proto(); - let upstream_client = ssh.upstream_client.clone(); - let name = adapter.name(); + let name = adapter.name().to_string(); let Some(available_language) = self.languages.available_language_for_name(&language) else { log::error!("failed to find available language {language}"); @@ -5165,12 +5183,11 @@ impl LspStore { } }); - let project_id = self.project_id; for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() { summaries.retain(|path, summaries_by_server_id| { if summaries_by_server_id.remove(&server_id).is_some() { - if let Some(downstream_client) = self.downstream_client.clone() { - downstream_client + if let Some((client, project_id)) = self.downstream_client.clone() { + client .send(proto::UpdateDiagnosticSummary { project_id, worktree_id: worktree_id.to_proto(), @@ -5236,9 +5253,9 @@ impl LspStore { buffers: impl IntoIterator>, cx: &mut ModelContext, ) { - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = client.request(proto::RestartLanguageServers { - project_id: self.project_id, + project_id, buffer_ids: buffers .into_iter() .map(|b| b.read(cx).remote_id().to_proto()) @@ -5694,9 +5711,9 @@ impl LspStore { async move { this.update(&mut cx, |this, cx| { cx.emit(LspStoreEvent::RefreshInlayHints); - this.downstream_client.as_ref().map(|client| { + this.downstream_client.as_ref().map(|(client, project_id)| { client.send(proto::RefreshInlayHints { - project_id: 
this.project_id, + project_id: *project_id, }) }) })? @@ -6073,9 +6090,9 @@ impl LspStore { cx.emit(LspStoreEvent::LanguageServerAdded(server_id)); - if let Some(downstream_client) = self.downstream_client.as_ref() { + if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() { downstream_client.send(proto::StartLanguageServer { - project_id: self.project_id, + project_id: *project_id, server: Some(proto::LanguageServer { id: server_id.0 as u64, name: language_server.name().to_string(), diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 78584cbae0c65f..0c54a16187a4a6 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -625,7 +625,7 @@ impl Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let worktree_store = cx.new_model(|_| WorktreeStore::new(None, false, fs.clone())); + let worktree_store = cx.new_model(|_| WorktreeStore::local(false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -722,7 +722,7 @@ impl Project { SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); let worktree_store = - cx.new_model(|_| WorktreeStore::new(Some(ssh.clone().into()), false, fs.clone())); + cx.new_model(|_| WorktreeStore::remote(false, ssh.clone().into(), 0, None)); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -744,7 +744,6 @@ impl Project { worktree_store.clone(), languages.clone(), ssh.clone().into(), - 0, cx, ) }); @@ -874,11 +873,15 @@ impl Project { let role = response.payload.role(); let worktree_store = cx.new_model(|_| { - let mut store = WorktreeStore::new(Some(client.clone().into()), true, fs.clone()); - if let Some(dev_server_project_id) = response.payload.dev_server_project_id { - store.set_dev_server_project_id(DevServerProjectId(dev_server_project_id)); - } - store + WorktreeStore::remote( + true, + client.clone().into(), + 
response.payload.project_id, + response + .payload + .dev_server_project_id + .map(DevServerProjectId), + ) })?; let buffer_store = cx.new_model(|cx| BufferStore::new(worktree_store.clone(), Some(remote_id), cx))?; diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 5c3b2a00a98665..9f25572fc7ec08 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -36,19 +36,27 @@ struct MatchingEntry { respond: oneshot::Sender, } +enum WorktreeStoreState { + Local { + fs: Arc, + }, + Remote { + dev_server_project_id: Option, + upstream_client: AnyProtoClient, + upstream_project_id: u64, + }, +} + pub struct WorktreeStore { next_entry_id: Arc, - upstream_client: Option, - downstream_client: Option, - remote_id: u64, - dev_server_project_id: Option, + downstream_client: Option<(AnyProtoClient, u64)>, retain_worktrees: bool, worktrees: Vec, worktrees_reordered: bool, #[allow(clippy::type_complexity)] loading_worktrees: HashMap, Shared, Arc>>>>, - fs: Arc, + state: WorktreeStoreState, } pub enum WorktreeStoreEvent { @@ -69,27 +77,37 @@ impl WorktreeStore { client.add_model_request_handler(Self::handle_expand_project_entry); } - pub fn new( - upstream_client: Option, - retain_worktrees: bool, - fs: Arc, - ) -> Self { + pub fn local(retain_worktrees: bool, fs: Arc) -> Self { Self { next_entry_id: Default::default(), loading_worktrees: Default::default(), - dev_server_project_id: None, downstream_client: None, worktrees: Vec::new(), worktrees_reordered: false, retain_worktrees, - remote_id: 0, - upstream_client, - fs, + state: WorktreeStoreState::Local { fs }, } } - pub fn set_dev_server_project_id(&mut self, id: DevServerProjectId) { - self.dev_server_project_id = Some(id); + pub fn remote( + retain_worktrees: bool, + upstream_client: AnyProtoClient, + upstream_project_id: u64, + dev_server_project_id: Option, + ) -> Self { + Self { + next_entry_id: Default::default(), + loading_worktrees: 
Default::default(), + downstream_client: None, + worktrees: Vec::new(), + worktrees_reordered: false, + retain_worktrees, + state: WorktreeStoreState::Remote { + upstream_client, + upstream_project_id, + dev_server_project_id, + }, + } } /// Iterates through all worktrees, including ones that don't appear in the project panel @@ -159,14 +177,28 @@ impl WorktreeStore { ) -> Task>> { let path: Arc = abs_path.as_ref().into(); if !self.loading_worktrees.contains_key(&path) { - let task = if let Some(client) = self.upstream_client.clone() { - if let Some(dev_server_project_id) = self.dev_server_project_id { - self.create_dev_server_worktree(client, dev_server_project_id, abs_path, cx) - } else { - self.create_ssh_worktree(client, abs_path, visible, cx) + let task = match &self.state { + WorktreeStoreState::Remote { + upstream_client, + dev_server_project_id, + .. + } => { + if let Some(dev_server_project_id) = dev_server_project_id { + self.create_dev_server_worktree( + upstream_client.clone(), + *dev_server_project_id, + abs_path, + cx, + ) + } else if upstream_client.is_via_collab() { + Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab")))) + } else { + self.create_ssh_worktree(upstream_client.clone(), abs_path, visible, cx) + } + } + WorktreeStoreState::Local { fs } => { + self.create_local_worktree(fs.clone(), abs_path, visible, cx) } - } else { - self.create_local_worktree(abs_path, visible, cx) }; self.loading_worktrees.insert(path.clone(), task.shared()); @@ -236,11 +268,11 @@ impl WorktreeStore { fn create_local_worktree( &mut self, + fs: Arc, abs_path: impl AsRef, visible: bool, cx: &mut ModelContext, ) -> Task, Arc>> { - let fs = self.fs.clone(); let next_entry_id = self.next_entry_id.clone(); let path: Arc = abs_path.as_ref().into(); @@ -374,6 +406,17 @@ impl WorktreeStore { self.worktrees_reordered = worktrees_reordered; } + fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { + match &self.state { + WorktreeStoreState::Remote { + 
upstream_client, + upstream_project_id, + .. + } => Some((upstream_client.clone(), *upstream_project_id)), + WorktreeStoreState::Local { .. } => None, + } + } + pub fn set_worktrees_from_proto( &mut self, worktrees: Vec, @@ -389,8 +432,8 @@ impl WorktreeStore { }) .collect::>(); - let client = self - .upstream_client + let (client, project_id) = self + .upstream_client() .clone() .ok_or_else(|| anyhow!("invalid project"))?; @@ -408,7 +451,7 @@ impl WorktreeStore { self.worktrees.push(handle); } else { self.add( - &Worktree::remote(self.remote_id, replica_id, worktree, client.clone(), cx), + &Worktree::remote(project_id, replica_id, worktree, client.clone(), cx), cx, ); } @@ -477,10 +520,9 @@ impl WorktreeStore { } pub fn send_project_updates(&mut self, cx: &mut ModelContext) { - let Some(downstream_client) = self.downstream_client.clone() else { + let Some((downstream_client, project_id)) = self.downstream_client.clone() else { return; }; - let project_id = self.remote_id; let update = proto::UpdateProject { project_id, @@ -549,8 +591,7 @@ impl WorktreeStore { cx: &mut ModelContext, ) { self.retain_worktrees = true; - self.remote_id = remote_id; - self.downstream_client = Some(downsteam_client); + self.downstream_client = Some((downsteam_client, remote_id)); // When shared, retain all worktrees for worktree_handle in self.worktrees.iter_mut() { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 9d5c26d6c7ce1a..0d644a64a6aa7a 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -45,7 +45,7 @@ impl HeadlessProject { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); let worktree_store = cx.new_model(|cx| { - let mut store = WorktreeStore::new(None, true, fs.clone()); + let mut store = WorktreeStore::local(true, fs.clone()); store.shared(SSH_PROJECT_ID, session.clone().into(), cx); store }); From 
bc751d6c1994634cea98bb855ec2981d3a976d8c Mon Sep 17 00:00:00 2001 From: Boris Verkhovskiy Date: Mon, 23 Sep 2024 10:03:55 -0600 Subject: [PATCH 272/762] Don't highlight Python import names as type (#17984) Works on #14892 Follow up to #17473 --- crates/languages/src/python/highlights.scm | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index df6b60466cd2a6..3255677bedc428 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -2,24 +2,6 @@ (attribute attribute: (identifier) @property) (type (identifier) @type) -; Module imports - -(import_statement - (dotted_name (identifier) @type)) - -(import_statement - (aliased_import - name: (dotted_name (identifier) @type) - alias: (identifier) @type)) - -(import_from_statement - (dotted_name (identifier) @type)) - -(import_from_statement - (aliased_import - name: (dotted_name (identifier) @type) - alias: (identifier) @type)) - ; Function calls (decorator) @function From 3c95a64a23c96303b864335ec55c3ec93ca0e414 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:11:26 -0400 Subject: [PATCH 273/762] Add a rather-conservative stale issue action in dry-run mode (#18233) Zed is becoming more popular and our issue tracker is only growing larger and larger. I realize that a stale issue action can be controversial, but the way we currently manage issues hasn't scaled well and it will only get worse. We need some crowd-sourced system. Let's ask those who have opened issues if their issues are still valid. This is rather conservative and only targets bugs and crashes. I'll run it in debug mode, report the results, and enable it if it feels right. We can always turn this off if users end up really not liking it. 
My original rules were: ```txt If an issue is old enough (12 months or older) AND if there are no recent comments from the team (last dev comment is older than 6 months) AND it has less than X upvotes (5) AND it does not have an open PR linked to it AND is a "defect" or "panic / crash" AND does not have a "ignore top-ranking issues" label AND was not opened by a org member AND is open AND is issue (not a pull request) THEN close the issue with a kind message. ``` But only some of these were actually supported in the configuration. Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 28 ++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 .github/workflows/close_stale_issues.yml diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml new file mode 100644 index 00000000000000..240403169c471e --- /dev/null +++ b/.github/workflows/close_stale_issues.yml @@ -0,0 +1,28 @@ +name: "Close Stale Issues" +on: + schedule: + - cron: "0 1 * * *" + workflow_dispatch: + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-issue-message: > + Hi there! 👋 + + We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in a week. Feel free to open a new issue if you're seeing this message after the issue has been closed. + + Thanks for your help! + close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" 
+ days-before-stale: 365 + days-before-close: 7 + only-issue-labels: "defect,panic / crash" + operations-per-run: 100 + ascending: true + enable-statistics: true + debug-only: true + stale-issue-label: "stale" From 20826336d9c2815da327ead99006b28dc8800082 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:15:33 -0400 Subject: [PATCH 274/762] update stale issue configuration to use `any-of-issue-labels` (#18236) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 240403169c471e..1cac6450e86052 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -20,7 +20,7 @@ jobs: close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" days-before-stale: 365 days-before-close: 7 - only-issue-labels: "defect,panic / crash" + any-of-issue-labels: "defect,panic / crash" operations-per-run: 100 ascending: true enable-statistics: true From 65bb989c61e90271e544566b7999feaa9e8ff105 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 23 Sep 2024 12:16:51 -0400 Subject: [PATCH 275/762] gpui: Update doc comment for `SharedString::new_static` (#18234) This PR updates the doc comment for `SharedString::new_static`. 
Release Notes: - N/A --- crates/gpui/src/shared_string.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/shared_string.rs b/crates/gpui/src/shared_string.rs index f5aef6adf80be6..e1fd4f1a5caa9c 100644 --- a/crates/gpui/src/shared_string.rs +++ b/crates/gpui/src/shared_string.rs @@ -10,9 +10,9 @@ use util::arc_cow::ArcCow; pub struct SharedString(ArcCow<'static, str>); impl SharedString { - /// creates a static SharedString - pub const fn new_static(s: &'static str) -> Self { - Self(ArcCow::Borrowed(s)) + /// Creates a static [`SharedString`] from a `&'static str`. + pub const fn new_static(str: &'static str) -> Self { + Self(ArcCow::Borrowed(str)) } } From 11953bbc16c12c61363f6e15d023c6ff9488114a Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:24:49 -0400 Subject: [PATCH 276/762] Disable debug mode for stale issue action (#18237) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 1cac6450e86052..be4f6f4af04bae 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -24,5 +24,4 @@ jobs: operations-per-run: 100 ascending: true enable-statistics: true - debug-only: true stale-issue-label: "stale" From 1efe87029bffc2b23784247db3a851dc11ba9ae8 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:32:31 -0400 Subject: [PATCH 277/762] Update stale issues configuration to use 180 days (#18238) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index be4f6f4af04bae..afc28ec180b15e 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -18,7 +18,7 @@ jobs: Thanks for your help! 
close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" - days-before-stale: 365 + days-before-stale: 180 days-before-close: 7 any-of-issue-labels: "defect,panic / crash" operations-per-run: 100 From 7051bc00c2fe8d7407480a805e950cb73343bb45 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 23 Sep 2024 11:40:34 -0600 Subject: [PATCH 278/762] Add "Fix with Assistant" code action on lines with diagnostics (#18163) Release Notes: - Added a new "Fix with Assistant" action on code with errors or warnings. --------- Co-authored-by: Nathan --- Cargo.lock | 1 + crates/assistant/Cargo.toml | 1 + crates/assistant/src/inline_assistant.rs | 195 ++++++++++++++++-- crates/assistant/src/workflow.rs | 1 + .../remote_editing_collaboration_tests.rs | 1 + crates/editor/src/editor.rs | 169 +++++++++++---- crates/gpui/src/executor.rs | 6 +- crates/multi_buffer/src/multi_buffer.rs | 67 ++++++ crates/project/src/lsp_store.rs | 34 +-- crates/project/src/project.rs | 2 +- crates/project/src/project_tests.rs | 3 +- crates/search/src/project_search.rs | 2 +- crates/workspace/src/workspace.rs | 8 +- 13 files changed, 418 insertions(+), 72 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c0f6751b895e28..e345736295613b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -404,6 +404,7 @@ dependencies = [ "language_model", "languages", "log", + "lsp", "markdown", "menu", "multi_buffer", diff --git a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml index 9f715d822474d2..9e61eee18aaf8a 100644 --- a/crates/assistant/Cargo.toml +++ b/crates/assistant/Cargo.toml @@ -51,6 +51,7 @@ indoc.workspace = true language.workspace = true language_model.workspace = true log.workspace = true +lsp.workspace = true markdown.workspace = true menu.workspace = true multi_buffer.workspace = true diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 
f2428c3a2e94cf..9c117e66653e93 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -12,8 +12,9 @@ use editor::{ BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, ToDisplayPoint, }, - Anchor, AnchorRangeExt, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, - ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, + Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorElement, EditorEvent, EditorMode, + EditorStyle, ExcerptId, ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, + ToOffset as _, ToPoint, }; use feature_flags::{FeatureFlagAppExt as _, ZedPro}; use fs::Fs; @@ -35,6 +36,7 @@ use language_model::{ }; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; +use project::{CodeAction, ProjectTransaction}; use rope::Rope; use settings::{Settings, SettingsStore}; use smol::future::FutureExt; @@ -49,10 +51,11 @@ use std::{ time::{Duration, Instant}, }; use terminal_view::terminal_panel::TerminalPanel; +use text::{OffsetRangeExt, ToPoint as _}; use theme::ThemeSettings; use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip}; use util::{RangeExt, ResultExt}; -use workspace::{notifications::NotificationId, Toast, Workspace}; +use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace}; pub fn init( fs: Arc, @@ -129,8 +132,10 @@ impl InlineAssistant { } pub fn register_workspace(&mut self, workspace: &View, cx: &mut WindowContext) { - cx.subscribe(workspace, |_, event, cx| { - Self::update_global(cx, |this, cx| this.handle_workspace_event(event, cx)); + cx.subscribe(workspace, |workspace, event, cx| { + Self::update_global(cx, |this, cx| { + this.handle_workspace_event(workspace, event, cx) + }); }) .detach(); @@ -150,19 +155,49 @@ impl InlineAssistant { .detach(); } - fn handle_workspace_event(&mut self, event: &workspace::Event, cx: &mut WindowContext) { - // When the user 
manually saves an editor, automatically accepts all finished transformations. - if let workspace::Event::UserSavedItem { item, .. } = event { - if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) { - if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { - for assist_id in editor_assists.assist_ids.clone() { - let assist = &self.assists[&assist_id]; - if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { - self.finish_assist(assist_id, false, cx) + fn handle_workspace_event( + &mut self, + workspace: View, + event: &workspace::Event, + cx: &mut WindowContext, + ) { + match event { + workspace::Event::UserSavedItem { item, .. } => { + // When the user manually saves an editor, automatically accepts all finished transformations. + if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) { + if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { + for assist_id in editor_assists.assist_ids.clone() { + let assist = &self.assists[&assist_id]; + if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { + self.finish_assist(assist_id, false, cx) + } } } } } + workspace::Event::ItemAdded { item } => { + self.register_workspace_item(&workspace, item.as_ref(), cx); + } + _ => (), + } + } + + fn register_workspace_item( + &mut self, + workspace: &View, + item: &dyn ItemHandle, + cx: &mut WindowContext, + ) { + if let Some(editor) = item.act_as::(cx) { + editor.update(cx, |editor, cx| { + editor.push_code_action_provider( + Arc::new(AssistantCodeActionProvider { + editor: cx.view().downgrade(), + workspace: workspace.downgrade(), + }), + cx, + ); + }); } } @@ -332,6 +367,7 @@ impl InlineAssistant { mut range: Range, initial_prompt: String, initial_transaction_id: Option, + focus: bool, workspace: Option>, assistant_panel: Option<&View>, cx: &mut WindowContext, @@ -404,6 +440,11 @@ impl InlineAssistant { assist_group.assist_ids.push(assist_id); 
editor_assists.assist_ids.push(assist_id); self.assist_groups.insert(assist_group_id, assist_group); + + if focus { + self.focus_assist(assist_id, cx); + } + assist_id } @@ -3289,6 +3330,132 @@ where } } +struct AssistantCodeActionProvider { + editor: WeakView, + workspace: WeakView, +} + +impl CodeActionProvider for AssistantCodeActionProvider { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>> { + let snapshot = buffer.read(cx).snapshot(); + let mut range = range.to_point(&snapshot); + + // Expand the range to line boundaries. + range.start.column = 0; + range.end.column = snapshot.line_len(range.end.row); + + let mut has_diagnostics = false; + for diagnostic in snapshot.diagnostics_in_range::<_, Point>(range.clone(), false) { + range.start = cmp::min(range.start, diagnostic.range.start); + range.end = cmp::max(range.end, diagnostic.range.end); + has_diagnostics = true; + } + if has_diagnostics { + if let Some(symbols_containing_start) = snapshot.symbols_containing(range.start, None) { + if let Some(symbol) = symbols_containing_start.last() { + range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); + } + } + + if let Some(symbols_containing_end) = snapshot.symbols_containing(range.end, None) { + if let Some(symbol) = symbols_containing_end.last() { + range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); + } + } + + Task::ready(Ok(vec![CodeAction { + server_id: language::LanguageServerId(0), + range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end), + lsp_action: lsp::CodeAction { + title: "Fix with Assistant".into(), + ..Default::default() + }, + }])) + } else { + Task::ready(Ok(Vec::new())) + } + } + + fn apply_code_action( + &self, + buffer: Model, + action: CodeAction, + excerpt_id: ExcerptId, + 
_push_to_history: bool, + cx: &mut WindowContext, + ) -> Task> { + let editor = self.editor.clone(); + let workspace = self.workspace.clone(); + cx.spawn(|mut cx| async move { + let editor = editor.upgrade().context("editor was released")?; + let range = editor + .update(&mut cx, |editor, cx| { + editor.buffer().update(cx, |multibuffer, cx| { + let buffer = buffer.read(cx); + let multibuffer_snapshot = multibuffer.read(cx); + + let old_context_range = + multibuffer_snapshot.context_range_for_excerpt(excerpt_id)?; + let mut new_context_range = old_context_range.clone(); + if action + .range + .start + .cmp(&old_context_range.start, buffer) + .is_lt() + { + new_context_range.start = action.range.start; + } + if action.range.end.cmp(&old_context_range.end, buffer).is_gt() { + new_context_range.end = action.range.end; + } + drop(multibuffer_snapshot); + + if new_context_range != old_context_range { + multibuffer.resize_excerpt(excerpt_id, new_context_range, cx); + } + + let multibuffer_snapshot = multibuffer.read(cx); + Some( + multibuffer_snapshot + .anchor_in_excerpt(excerpt_id, action.range.start)? + ..multibuffer_snapshot + .anchor_in_excerpt(excerpt_id, action.range.end)?, + ) + }) + })? 
+ .context("invalid range")?; + let assistant_panel = workspace.update(&mut cx, |workspace, cx| { + workspace + .panel::(cx) + .context("assistant panel was released") + })??; + + cx.update_global(|assistant: &mut InlineAssistant, cx| { + let assist_id = assistant.suggest_assist( + &editor, + range, + "Fix Diagnostics".into(), + None, + true, + Some(workspace), + Some(&assistant_panel), + cx, + ); + assistant.start_assist(assist_id, cx); + })?; + + Ok(ProjectTransaction::default()) + }) + } +} + fn prefixes(text: &str) -> impl Iterator { (0..text.len() - 1).map(|ix| &text[..ix + 1]) } diff --git a/crates/assistant/src/workflow.rs b/crates/assistant/src/workflow.rs index 75c65ed0a78e42..8a770e21aa7caa 100644 --- a/crates/assistant/src/workflow.rs +++ b/crates/assistant/src/workflow.rs @@ -187,6 +187,7 @@ impl WorkflowSuggestion { suggestion_range, initial_prompt, initial_transaction_id, + false, Some(workspace.clone()), Some(assistant_panel), cx, diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index cdcf69cf7e9ace..a81166bb00ceec 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -53,6 +53,7 @@ async fn test_sharing_an_ssh_remote_project( let (project_a, worktree_id) = client_a .build_ssh_project("/code/project1", client_ssh, cx_a) .await; + executor.run_until_parked(); // User A shares the remote project. 
let active_call_a = cx_a.read(ActiveCall::global); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b1a3d95a0da780..cbc272d995213c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -68,7 +68,7 @@ use element::LineWithInvisibles; pub use element::{ CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, }; -use futures::FutureExt; +use futures::{future, FutureExt}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::blame::GitBlame; use git::diff_hunk_to_display; @@ -569,8 +569,8 @@ pub struct Editor { find_all_references_task_sources: Vec, next_completion_id: CompletionId, completion_documentation_pre_resolve_debounce: DebouncedDelay, - available_code_actions: Option<(Location, Arc<[CodeAction]>)>, - code_actions_task: Option>, + available_code_actions: Option<(Location, Arc<[AvailableCodeAction]>)>, + code_actions_task: Option>>, document_highlights_task: Option>, linked_editing_range_task: Option>>, linked_edit_ranges: linked_editing_ranges::LinkedEditingRanges, @@ -590,6 +590,7 @@ pub struct Editor { gutter_hovered: bool, hovered_link_state: Option, inline_completion_provider: Option, + code_action_providers: Vec>, active_inline_completion: Option, // enable_inline_completions is a switch that Vim can use to disable // inline completions based on its mode. 
@@ -1360,10 +1361,16 @@ impl CompletionsMenu { } } +struct AvailableCodeAction { + excerpt_id: ExcerptId, + action: CodeAction, + provider: Arc, +} + #[derive(Clone)] struct CodeActionContents { tasks: Option>, - actions: Option>, + actions: Option>, } impl CodeActionContents { @@ -1395,9 +1402,11 @@ impl CodeActionContents { .map(|(kind, task)| CodeActionsItem::Task(kind.clone(), task.clone())) }) .chain(self.actions.iter().flat_map(|actions| { - actions - .iter() - .map(|action| CodeActionsItem::CodeAction(action.clone())) + actions.iter().map(|available| CodeActionsItem::CodeAction { + excerpt_id: available.excerpt_id, + action: available.action.clone(), + provider: available.provider.clone(), + }) })) } fn get(&self, index: usize) -> Option { @@ -1410,10 +1419,13 @@ impl CodeActionContents { .cloned() .map(|(kind, task)| CodeActionsItem::Task(kind, task)) } else { - actions - .get(index - tasks.templates.len()) - .cloned() - .map(CodeActionsItem::CodeAction) + actions.get(index - tasks.templates.len()).map(|available| { + CodeActionsItem::CodeAction { + excerpt_id: available.excerpt_id, + action: available.action.clone(), + provider: available.provider.clone(), + } + }) } } (Some(tasks), None) => tasks @@ -1421,7 +1433,15 @@ impl CodeActionContents { .get(index) .cloned() .map(|(kind, task)| CodeActionsItem::Task(kind, task)), - (None, Some(actions)) => actions.get(index).cloned().map(CodeActionsItem::CodeAction), + (None, Some(actions)) => { + actions + .get(index) + .map(|available| CodeActionsItem::CodeAction { + excerpt_id: available.excerpt_id, + action: available.action.clone(), + provider: available.provider.clone(), + }) + } (None, None) => None, } } @@ -1431,7 +1451,11 @@ impl CodeActionContents { #[derive(Clone)] enum CodeActionsItem { Task(TaskSourceKind, ResolvedTask), - CodeAction(CodeAction), + CodeAction { + excerpt_id: ExcerptId, + action: CodeAction, + provider: Arc, + }, } impl CodeActionsItem { @@ -1442,14 +1466,14 @@ impl CodeActionsItem { 
Some(task) } fn as_code_action(&self) -> Option<&CodeAction> { - let Self::CodeAction(action) = self else { + let Self::CodeAction { action, .. } = self else { return None; }; Some(action) } fn label(&self) -> String { match self { - Self::CodeAction(action) => action.lsp_action.title.clone(), + Self::CodeAction { action, .. } => action.lsp_action.title.clone(), Self::Task(_, task) => task.resolved_label.clone(), } } @@ -1588,7 +1612,9 @@ impl CodeActionsMenu { .enumerate() .max_by_key(|(_, action)| match action { CodeActionsItem::Task(_, task) => task.resolved_label.chars().count(), - CodeActionsItem::CodeAction(action) => action.lsp_action.title.chars().count(), + CodeActionsItem::CodeAction { action, .. } => { + action.lsp_action.title.chars().count() + } }) .map(|(ix, _)| ix), ) @@ -1864,6 +1890,11 @@ impl Editor { None }; + let mut code_action_providers = Vec::new(); + if let Some(project) = project.clone() { + code_action_providers.push(Arc::new(project) as Arc<_>); + } + let mut this = Self { focus_handle, show_cursor_when_unfocused: false, @@ -1915,6 +1946,7 @@ impl Editor { next_completion_id: 0, completion_documentation_pre_resolve_debounce: DebouncedDelay::new(), next_inlay_id: 0, + code_action_providers, available_code_actions: Default::default(), code_actions_task: Default::default(), document_highlights_task: Default::default(), @@ -4553,7 +4585,7 @@ impl Editor { let action = action.clone(); cx.spawn(|editor, mut cx| async move { while let Some(prev_task) = task { - prev_task.await; + prev_task.await.log_err(); task = editor.update(&mut cx, |this, _| this.code_actions_task.take())?; } @@ -4727,17 +4759,16 @@ impl Editor { Some(Task::ready(Ok(()))) }) } - CodeActionsItem::CodeAction(action) => { - let apply_code_actions = workspace - .read(cx) - .project() - .clone() - .update(cx, |project, cx| { - project.apply_code_action(buffer, action, true, cx) - }); + CodeActionsItem::CodeAction { + excerpt_id, + action, + provider, + } => { + let 
apply_code_action = + provider.apply_code_action(buffer, action, excerpt_id, true, cx); let workspace = workspace.downgrade(); Some(cx.spawn(|editor, cx| async move { - let project_transaction = apply_code_actions.await?; + let project_transaction = apply_code_action.await?; Self::open_project_transaction( &editor, workspace, @@ -4835,8 +4866,16 @@ impl Editor { Ok(()) } + pub fn push_code_action_provider( + &mut self, + provider: Arc, + cx: &mut ViewContext, + ) { + self.code_action_providers.push(provider); + self.refresh_code_actions(cx); + } + fn refresh_code_actions(&mut self, cx: &mut ViewContext) -> Option<()> { - let project = self.project.clone()?; let buffer = self.buffer.read(cx); let newest_selection = self.selections.newest_anchor().clone(); let (start_buffer, start) = buffer.text_anchor_for_position(newest_selection.start, cx)?; @@ -4850,13 +4889,30 @@ impl Editor { .timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT) .await; - let actions = if let Ok(code_actions) = project.update(&mut cx, |project, cx| { - project.code_actions(&start_buffer, start..end, cx) - }) { - code_actions.await - } else { - Vec::new() - }; + let (providers, tasks) = this.update(&mut cx, |this, cx| { + let providers = this.code_action_providers.clone(); + let tasks = this + .code_action_providers + .iter() + .map(|provider| provider.code_actions(&start_buffer, start..end, cx)) + .collect::>(); + (providers, tasks) + })?; + + let mut actions = Vec::new(); + for (provider, provider_actions) in + providers.into_iter().zip(future::join_all(tasks).await) + { + if let Some(provider_actions) = provider_actions.log_err() { + actions.extend(provider_actions.into_iter().map(|action| { + AvailableCodeAction { + excerpt_id: newest_selection.start.excerpt_id, + action, + provider: provider.clone(), + } + })); + } + } this.update(&mut cx, |this, cx| { this.available_code_actions = if actions.is_empty() { @@ -4872,7 +4928,6 @@ impl Editor { }; cx.notify(); }) - .log_err(); })); None } @@ -9685,7 +9740,7 @@ 
impl Editor { }) .context("location tasks preparation")?; - let locations = futures::future::join_all(location_tasks) + let locations = future::join_all(location_tasks) .await .into_iter() .filter_map(|location| location.transpose()) @@ -12574,6 +12629,48 @@ pub trait CompletionProvider { } } +pub trait CodeActionProvider { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>>; + + fn apply_code_action( + &self, + buffer_handle: Model, + action: CodeAction, + excerpt_id: ExcerptId, + push_to_history: bool, + cx: &mut WindowContext, + ) -> Task>; +} + +impl CodeActionProvider for Model { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>> { + self.update(cx, |project, cx| project.code_actions(buffer, range, cx)) + } + + fn apply_code_action( + &self, + buffer_handle: Model, + action: CodeAction, + _excerpt_id: ExcerptId, + push_to_history: bool, + cx: &mut WindowContext, + ) -> Task> { + self.update(cx, |project, cx| { + project.apply_code_action(buffer_handle, action, push_to_history, cx) + }) + } +} + fn snippet_completions( project: &Project, buffer: &Model, diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index b909e63271c069..3035892d7a17f2 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -407,7 +407,11 @@ impl BackgroundExecutor { /// How many CPUs are available to the dispatcher. pub fn num_cpus(&self) -> usize { - num_cpus::get() + #[cfg(any(test, feature = "test-support"))] + return 4; + + #[cfg(not(any(test, feature = "test-support")))] + return num_cpus::get(); } /// Whether we're on the main thread. 
diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index d406f9bfaf6ac2..0df196bb9829dc 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1810,6 +1810,69 @@ impl MultiBuffer { self.as_singleton().unwrap().read(cx).is_parsing() } + pub fn resize_excerpt( + &mut self, + id: ExcerptId, + range: Range, + cx: &mut ModelContext, + ) { + self.sync(cx); + + let snapshot = self.snapshot(cx); + let locator = snapshot.excerpt_locator_for_id(id); + let mut new_excerpts = SumTree::default(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(&()); + let mut edits = Vec::>::new(); + + let prefix = cursor.slice(&Some(locator), Bias::Left, &()); + new_excerpts.append(prefix, &()); + + let mut excerpt = cursor.item().unwrap().clone(); + let old_text_len = excerpt.text_summary.len; + + excerpt.range.context.start = range.start; + excerpt.range.context.end = range.end; + excerpt.max_buffer_row = range.end.to_point(&excerpt.buffer).row; + + excerpt.text_summary = excerpt + .buffer + .text_summary_for_range(excerpt.range.context.clone()); + + let new_start_offset = new_excerpts.summary().text.len; + let old_start_offset = cursor.start().1; + let edit = Edit { + old: old_start_offset..old_start_offset + old_text_len, + new: new_start_offset..new_start_offset + excerpt.text_summary.len, + }; + + if let Some(last_edit) = edits.last_mut() { + if last_edit.old.end == edit.old.start { + last_edit.old.end = edit.old.end; + last_edit.new.end = edit.new.end; + } else { + edits.push(edit); + } + } else { + edits.push(edit); + } + + new_excerpts.push(excerpt, &()); + + cursor.next(&()); + + new_excerpts.append(cursor.suffix(&()), &()); + + drop(cursor); + self.snapshot.borrow_mut().excerpts = new_excerpts; + + self.subscriptions.publish_mut(edits); + cx.emit(Event::Edited { + singleton_buffer_edited: false, + }); + cx.emit(Event::ExcerptsExpanded { ids: vec![id] }); + 
cx.notify(); + } + pub fn expand_excerpts( &mut self, ids: impl IntoIterator, @@ -3139,6 +3202,10 @@ impl MultiBufferSnapshot { None } + pub fn context_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { + Some(self.excerpt(excerpt_id)?.range.context.clone()) + } + pub fn can_resolve(&self, anchor: &Anchor) -> bool { if anchor.excerpt_id == ExcerptId::min() || anchor.excerpt_id == ExcerptId::max() { true diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 4506fcc6feb430..b2920bc791c47d 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1431,7 +1431,7 @@ impl LspStore { buffer_handle: &Model, range: Range, cx: &mut ModelContext, - ) -> Task> { + ) -> Task>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request_task = upstream_client.request(proto::MultiLspQuery { buffer_id: buffer_handle.read(cx).remote_id().into(), @@ -1451,14 +1451,11 @@ impl LspStore { let buffer = buffer_handle.clone(); cx.spawn(|weak_project, cx| async move { let Some(project) = weak_project.upgrade() else { - return Vec::new(); + return Ok(Vec::new()); }; - join_all( - request_task - .await - .log_err() - .map(|response| response.responses) - .unwrap_or_default() + let responses = request_task.await?.responses; + let actions = join_all( + responses .into_iter() .filter_map(|lsp_response| match lsp_response.response? { proto::lsp_response::Response::GetCodeActionsResponse(response) => { @@ -1470,7 +1467,7 @@ impl LspStore { } }) .map(|code_actions_response| { - let response = GetCodeActions { + GetCodeActions { range: range.clone(), kinds: None, } @@ -1479,14 +1476,17 @@ impl LspStore { project.clone(), buffer.clone(), cx.clone(), - ); - async move { response.await.log_err().unwrap_or_default() } + ) }), ) - .await - .into_iter() - .flatten() - .collect() + .await; + + Ok(actions + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .collect()) }) } else { let all_actions_task = self.request_multiple_lsp_locally( @@ -1498,7 +1498,9 @@ impl LspStore { }, cx, ); - cx.spawn(|_, _| async move { all_actions_task.await.into_iter().flatten().collect() }) + cx.spawn( + |_, _| async move { Ok(all_actions_task.await.into_iter().flatten().collect()) }, + ) } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 0c54a16187a4a6..b1347c6d063f25 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3247,7 +3247,7 @@ impl Project { buffer_handle: &Model, range: Range, cx: &mut ModelContext, - ) -> Task> { + ) -> Task>> { let buffer = buffer_handle.read(cx); let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); self.lsp_store.update(cx, |lsp_store, cx| { diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index d0d67f0cda4a40..a7d2e6766c2330 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -2708,7 +2708,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) { .next() .await; - let action = actions.await[0].clone(); + let action = actions.await.unwrap()[0].clone(); let apply = project.update(cx, |project, cx| { project.apply_code_action(buffer.clone(), action, true, cx) }); @@ -5046,6 +5046,7 @@ async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) { vec!["TailwindServer code action", "TypeScriptServer code action"], code_actions_task .await + .unwrap() .into_iter() .map(|code_action| code_action.lsp_action.title) .sorted() diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index fac3c55bf45506..d5b719a657628e 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -2745,7 +2745,7 @@ pub mod tests { search_view .results_editor .update(cx, |editor, cx| editor.display_text(cx)), - "\n\n\nconst ONE: 
usize = 1;\n\n\n\n\nconst TWO: usize = one::ONE + one::ONE;\n", + "\n\n\nconst TWO: usize = one::ONE + one::ONE;\n\n\n\n\nconst ONE: usize = 1;\n", "New search in directory should have a filter that matches a certain directory" ); }) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 1fbeab38a2e8b4..92bfc8c5c56d4d 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -675,7 +675,9 @@ impl DelayedDebouncedEditAction { pub enum Event { PaneAdded(View), PaneRemoved, - ItemAdded, + ItemAdded { + item: Box, + }, ItemRemoved, ActiveItemChanged, UserSavedItem { @@ -2984,7 +2986,9 @@ impl Workspace { match event { pane::Event::AddItem { item } => { item.added_to_pane(self, pane, cx); - cx.emit(Event::ItemAdded); + cx.emit(Event::ItemAdded { + item: item.boxed_clone(), + }); } pane::Event::Split(direction) => { self.split_and_clone(pane, *direction, cx); From 1ff10b71c8ea9cae6263225445e35c68ab0808be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8B=90=E7=8B=B8?= <134658521+Huliiiiii@users.noreply.github.com> Date: Tue, 24 Sep 2024 03:39:01 +0800 Subject: [PATCH 279/762] lua: Add auto-close for single quote strings and highlight escape sequences (#18199) - Add auto close to single quote string - Add syntax highlights to escape sequence --- extensions/lua/languages/lua/config.toml | 2 +- extensions/lua/languages/lua/highlights.scm | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/extensions/lua/languages/lua/config.toml b/extensions/lua/languages/lua/config.toml index 6c3aee09ea71b2..7ec8ef2f03f549 100644 --- a/extensions/lua/languages/lua/config.toml +++ b/extensions/lua/languages/lua/config.toml @@ -8,6 +8,6 @@ brackets = [ { start = "[", end = "]", close = true, newline = true }, { start = "(", end = ")", close = true, newline = true }, { start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] }, - { start = "'", end = "'", close = false, newline = false, not_in = 
["string"] }, + { start = "'", end = "'", close = true, newline = false, not_in = ["string"] }, ] collapsed_placeholder = "--[ ... ]--" diff --git a/extensions/lua/languages/lua/highlights.scm b/extensions/lua/languages/lua/highlights.scm index 98e2c2eaff6380..7b0b8364ea2d38 100644 --- a/extensions/lua/languages/lua/highlights.scm +++ b/extensions/lua/languages/lua/highlights.scm @@ -196,3 +196,4 @@ (number) @number (string) @string +(escape_sequence) @string.escape From e95e1c9ae5bd94ffaa595b8e56a727802441457d Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 23 Sep 2024 19:45:00 +0000 Subject: [PATCH 280/762] Add '?plain=1' to Permalinks on GitLab/GitHub for md files (#18241) Improve our Permalinks to markdown files. GitHub/GitLab supports the same URL syntax. --- crates/git_hosting_providers/src/providers/github.rs | 3 +++ crates/git_hosting_providers/src/providers/gitlab.rs | 3 +++ 2 files changed, 6 insertions(+) diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index be46b51ddf7bdf..77eaa80961e61c 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -145,6 +145,9 @@ impl GitHostingProvider for Github { .base_url() .join(&format!("{owner}/{repo}/blob/{sha}/{path}")) .unwrap(); + if path.ends_with(".md") { + permalink.set_query(Some("plain=1")); + } permalink.set_fragment( selection .map(|selection| self.line_fragment(&selection)) diff --git a/crates/git_hosting_providers/src/providers/gitlab.rs b/crates/git_hosting_providers/src/providers/gitlab.rs index ccb8a7280a2c64..36ee214cf9d47c 100644 --- a/crates/git_hosting_providers/src/providers/gitlab.rs +++ b/crates/git_hosting_providers/src/providers/gitlab.rs @@ -65,6 +65,9 @@ impl GitHostingProvider for Gitlab { .base_url() .join(&format!("{owner}/{repo}/-/blob/{sha}/{path}")) .unwrap(); + if path.ends_with(".md") { + permalink.set_query(Some("plain=1")); + } 
permalink.set_fragment( selection .map(|selection| self.line_fragment(&selection)) From e4080ef565f71c15a4cd89f6b0d565b82ec53f7d Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Sep 2024 14:33:28 -0600 Subject: [PATCH 281/762] Move formatting to LSP store (#18242) Release Notes: - ssh-remoting: Fixed format on save --------- Co-authored-by: Mikayla --- .../src/activity_indicator.rs | 2 +- crates/collab/src/tests/integration_tests.rs | 4 +- crates/editor/src/editor.rs | 4 +- crates/editor/src/items.rs | 4 +- crates/project/src/lsp_store.rs | 649 +++++++++++++++++- crates/project/src/prettier_store.rs | 8 +- crates/project/src/project.rs | 639 +---------------- crates/project/src/project_tests.rs | 2 +- 8 files changed, 655 insertions(+), 657 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index a9ae7d075d10c2..fee0ef73f7bee1 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -280,7 +280,7 @@ impl ActivityIndicator { } // Show any formatting failure - if let Some(failure) = self.project.read(cx).last_formatting_failure() { + if let Some(failure) = self.project.read(cx).last_formatting_failure(cx) { return Some(Content { icon: Some( Icon::new(IconName::Warning) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 51593e081e46c1..d5cef3589cce33 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -28,8 +28,8 @@ use live_kit_client::MacOSDisplay; use lsp::LanguageServerId; use parking_lot::Mutex; use project::{ - search::SearchQuery, search::SearchResult, DiagnosticSummary, FormatTrigger, HoverBlockKind, - Project, ProjectPath, + lsp_store::FormatTrigger, search::SearchQuery, search::SearchResult, DiagnosticSummary, + HoverBlockKind, Project, ProjectPath, }; use rand::prelude::*; use 
serde_json::json; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index cbc272d995213c..dc536471023f08 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -122,8 +122,8 @@ use ordered_float::OrderedFloat; use parking_lot::{Mutex, RwLock}; use project::project_settings::{GitGutterSetting, ProjectSettings}; use project::{ - CodeAction, Completion, CompletionIntent, FormatTrigger, Item, Location, Project, ProjectPath, - ProjectTransaction, TaskSourceKind, + lsp_store::FormatTrigger, CodeAction, Completion, CompletionIntent, Item, Location, Project, + ProjectPath, ProjectTransaction, TaskSourceKind, }; use rand::prelude::*; use rpc::{proto::*, ErrorExt}; diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 1d301f2ee68cd6..b3f4cc813fe8ad 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -20,8 +20,8 @@ use language::{ }; use multi_buffer::AnchorRangeExt; use project::{ - project_settings::ProjectSettings, search::SearchQuery, FormatTrigger, Item as _, Project, - ProjectPath, + lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, Item as _, + Project, ProjectPath, }; use rpc::proto::{self, update_view, PeerId}; use settings::Settings; diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index b2920bc791c47d..6673f9da1ddd71 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1,5 +1,6 @@ use crate::{ buffer_store::{BufferStore, BufferStoreEvent}, + deserialize_code_actions, environment::ProjectEnvironment, lsp_command::{self, *}, lsp_ext_command, @@ -19,7 +20,7 @@ use futures::{ future::{join_all, BoxFuture, Shared}, select, stream::FuturesUnordered, - Future, FutureExt, StreamExt, + AsyncWriteExt, Future, FutureExt, StreamExt, }; use globset::{Glob, GlobSet, GlobSetBuilder}; use gpui::{ @@ -29,12 +30,13 @@ use gpui::{ use http_client::{AsyncBody, HttpClient, Request, Response, Uri}; use 
language::{ language_settings::{ - all_language_settings, language_settings, AllLanguageSettings, LanguageSettings, + all_language_settings, language_settings, AllLanguageSettings, FormatOnSave, Formatter, + LanguageSettings, SelectedFormatter, }, markdown, point_to_lsp, prepare_completion_documentation, proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, - DiagnosticEntry, DiagnosticSet, Documentation, File as _, Language, LanguageConfig, + DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageConfig, LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, LspAdapterDelegate, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, @@ -90,12 +92,38 @@ const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1); const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); pub const SERVER_PROGRESS_THROTTLE_TIMEOUT: Duration = Duration::from_millis(100); +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum FormatTrigger { + Save, + Manual, +} + +// Currently, formatting operations are represented differently depending on +// whether they come from a language server or an external command. 
+#[derive(Debug)] +pub enum FormatOperation { + Lsp(Vec<(Range, String)>), + External(Diff), + Prettier(Diff), +} + +impl FormatTrigger { + fn from_proto(value: i32) -> FormatTrigger { + match value { + 0 => FormatTrigger::Save, + 1 => FormatTrigger::Manual, + _ => FormatTrigger::Save, + } + } +} + pub struct LocalLspStore { http_client: Option>, environment: Model, fs: Arc, yarn: Model, pub language_servers: HashMap, + buffers_being_formatted: HashSet, last_workspace_edits_by_language_server: HashMap, language_server_watched_paths: HashMap>, language_server_watcher_registrations: @@ -104,6 +132,7 @@ pub struct LocalLspStore { HashMap)>, prettier_store: Model, current_lsp_settings: HashMap, + last_formatting_failure: Option, _subscription: gpui::Subscription, } @@ -128,6 +157,485 @@ impl LocalLspStore { futures::future::join_all(shutdown_futures).await; } } + async fn format_locally( + lsp_store: WeakModel, + mut buffers_with_paths: Vec<(Model, Option)>, + push_to_history: bool, + trigger: FormatTrigger, + mut cx: AsyncAppContext, + ) -> anyhow::Result { + // Do not allow multiple concurrent formatting requests for the + // same buffer. 
+ lsp_store.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + buffers_with_paths.retain(|(buffer, _)| { + this.buffers_being_formatted + .insert(buffer.read(cx).remote_id()) + }); + })?; + + let _cleanup = defer({ + let this = lsp_store.clone(); + let mut cx = cx.clone(); + let buffers = &buffers_with_paths; + move || { + this.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + for (buffer, _) in buffers { + this.buffers_being_formatted + .remove(&buffer.read(cx).remote_id()); + } + }) + .ok(); + } + }); + + let mut project_transaction = ProjectTransaction::default(); + for (buffer, buffer_abs_path) in &buffers_with_paths { + let (primary_adapter_and_server, adapters_and_servers) = + lsp_store.update(&mut cx, |lsp_store, cx| { + let buffer = buffer.read(cx); + + let adapters_and_servers = lsp_store + .language_servers_for_buffer(buffer, cx) + .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())) + .collect::>(); + + let primary_adapter = lsp_store + .primary_language_server_for_buffer(buffer, cx) + .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())); + + (primary_adapter, adapters_and_servers) + })?; + + let settings = buffer.update(&mut cx, |buffer, cx| { + language_settings(buffer.language(), buffer.file(), cx).clone() + })?; + + let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save; + let ensure_final_newline = settings.ensure_final_newline_on_save; + + // First, format buffer's whitespace according to the settings. + let trailing_whitespace_diff = if remove_trailing_whitespace { + Some( + buffer + .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))? 
+ .await, + ) + } else { + None + }; + let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| { + buffer.finalize_last_transaction(); + buffer.start_transaction(); + if let Some(diff) = trailing_whitespace_diff { + buffer.apply_diff(diff, cx); + } + if ensure_final_newline { + buffer.ensure_final_newline(cx); + } + buffer.end_transaction(cx) + })?; + + // Apply the `code_actions_on_format` before we run the formatter. + let code_actions = deserialize_code_actions(&settings.code_actions_on_format); + #[allow(clippy::nonminimal_bool)] + if !code_actions.is_empty() + && !(trigger == FormatTrigger::Save && settings.format_on_save == FormatOnSave::Off) + { + LspStore::execute_code_actions_on_servers( + &lsp_store, + &adapters_and_servers, + code_actions, + buffer, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await?; + } + + // Apply language-specific formatting using either the primary language server + // or external command. + // Except for code actions, which are applied with all connected language servers. 
+ let primary_language_server = + primary_adapter_and_server.map(|(_adapter, server)| server.clone()); + let server_and_buffer = primary_language_server + .as_ref() + .zip(buffer_abs_path.as_ref()); + + let prettier_settings = buffer.read_with(&cx, |buffer, cx| { + language_settings(buffer.language(), buffer.file(), cx) + .prettier + .clone() + })?; + + let mut format_operations: Vec = vec![]; + { + match trigger { + FormatTrigger::Save => { + match &settings.format_on_save { + FormatOnSave::Off => { + // nothing + } + FormatOnSave::On => { + match &settings.formatter { + SelectedFormatter::Auto => { + // do the auto-format: prefer prettier, fallback to primary language server + let diff = { + if prettier_settings.allowed { + Self::perform_format( + &Formatter::Prettier, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + } else { + Self::perform_format( + &Formatter::LanguageServer { name: None }, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + } + } + .log_err() + .flatten(); + if let Some(op) = diff { + format_operations.push(op); + } + } + SelectedFormatter::List(formatters) => { + for formatter in formatters.as_ref() { + let diff = Self::perform_format( + formatter, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + .log_err() + .flatten(); + if let Some(op) = diff { + format_operations.push(op); + } + + // format with formatter + } + } + } + } + FormatOnSave::List(formatters) => { + for formatter in formatters.as_ref() { + let diff = Self::perform_format( + formatter, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + 
&adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + .log_err() + .flatten(); + if let Some(op) = diff { + format_operations.push(op); + } + } + } + } + } + FormatTrigger::Manual => { + match &settings.formatter { + SelectedFormatter::Auto => { + // do the auto-format: prefer prettier, fallback to primary language server + let diff = { + if prettier_settings.allowed { + Self::perform_format( + &Formatter::Prettier, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + } else { + Self::perform_format( + &Formatter::LanguageServer { name: None }, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + } + } + .log_err() + .flatten(); + + if let Some(op) = diff { + format_operations.push(op) + } + } + SelectedFormatter::List(formatters) => { + for formatter in formatters.as_ref() { + // format with formatter + let diff = Self::perform_format( + formatter, + server_and_buffer, + lsp_store.clone(), + buffer, + buffer_abs_path, + &settings, + &adapters_and_servers, + push_to_history, + &mut project_transaction, + &mut cx, + ) + .await + .log_err() + .flatten(); + if let Some(op) = diff { + format_operations.push(op); + } + } + } + } + } + } + } + + buffer.update(&mut cx, |b, cx| { + // If the buffer had its whitespace formatted and was edited while the language-specific + // formatting was being computed, avoid applying the language-specific formatting, because + // it can't be grouped with the whitespace formatting in the undo history. 
+ if let Some(transaction_id) = whitespace_transaction_id { + if b.peek_undo_stack() + .map_or(true, |e| e.transaction_id() != transaction_id) + { + format_operations.clear(); + } + } + + // Apply any language-specific formatting, and group the two formatting operations + // in the buffer's undo history. + for operation in format_operations { + match operation { + FormatOperation::Lsp(edits) => { + b.edit(edits, None, cx); + } + FormatOperation::External(diff) => { + b.apply_diff(diff, cx); + } + FormatOperation::Prettier(diff) => { + b.apply_diff(diff, cx); + } + } + + if let Some(transaction_id) = whitespace_transaction_id { + b.group_until_transaction(transaction_id); + } else if let Some(transaction) = project_transaction.0.get(buffer) { + b.group_until_transaction(transaction.id) + } + } + + if let Some(transaction) = b.finalize_last_transaction().cloned() { + if !push_to_history { + b.forget_transaction(transaction.id); + } + project_transaction.0.insert(buffer.clone(), transaction); + } + })?; + } + + Ok(project_transaction) + } + + #[allow(clippy::too_many_arguments)] + async fn perform_format( + formatter: &Formatter, + primary_server_and_buffer: Option<(&Arc, &PathBuf)>, + lsp_store: WeakModel, + buffer: &Model, + buffer_abs_path: &Option, + settings: &LanguageSettings, + adapters_and_servers: &[(Arc, Arc)], + push_to_history: bool, + transaction: &mut ProjectTransaction, + cx: &mut AsyncAppContext, + ) -> Result, anyhow::Error> { + let result = match formatter { + Formatter::LanguageServer { name } => { + if let Some((language_server, buffer_abs_path)) = primary_server_and_buffer { + let language_server = if let Some(name) = name { + adapters_and_servers + .iter() + .find_map(|(adapter, server)| { + adapter.name.0.as_ref().eq(name.as_str()).then_some(server) + }) + .unwrap_or(language_server) + } else { + language_server + }; + + Some(FormatOperation::Lsp( + LspStore::format_via_lsp( + &lsp_store, + buffer, + buffer_abs_path, + language_server, + 
settings, + cx, + ) + .await + .context("failed to format via language server")?, + )) + } else { + None + } + } + Formatter::Prettier => { + let prettier = lsp_store.update(cx, |lsp_store, _cx| { + lsp_store.prettier_store().unwrap().downgrade() + })?; + prettier_store::format_with_prettier(&prettier, buffer, cx) + .await + .transpose() + .ok() + .flatten() + } + Formatter::External { command, arguments } => { + let buffer_abs_path = buffer_abs_path.as_ref().map(|path| path.as_path()); + Self::format_via_external_command(buffer, buffer_abs_path, command, arguments, cx) + .await + .context(format!( + "failed to format via external command {:?}", + command + ))? + .map(FormatOperation::External) + } + Formatter::CodeActions(code_actions) => { + let code_actions = deserialize_code_actions(code_actions); + if !code_actions.is_empty() { + LspStore::execute_code_actions_on_servers( + &lsp_store, + adapters_and_servers, + code_actions, + buffer, + push_to_history, + transaction, + cx, + ) + .await?; + } + None + } + }; + anyhow::Ok(result) + } + + async fn format_via_external_command( + buffer: &Model, + buffer_abs_path: Option<&Path>, + command: &str, + arguments: &[String], + cx: &mut AsyncAppContext, + ) -> Result> { + let working_dir_path = buffer.update(cx, |buffer, cx| { + let file = File::from_dyn(buffer.file())?; + let worktree = file.worktree.read(cx); + let mut worktree_path = worktree.abs_path().to_path_buf(); + if worktree.root_entry()?.is_file() { + worktree_path.pop(); + } + Some(worktree_path) + })?; + + let mut child = smol::process::Command::new(command); + #[cfg(target_os = "windows")] + { + use smol::process::windows::CommandExt; + child.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); + } + + if let Some(working_dir_path) = working_dir_path { + child.current_dir(working_dir_path); + } + + let mut child = child + .args(arguments.iter().map(|arg| { + if let Some(buffer_abs_path) = buffer_abs_path { + arg.replace("{buffer_path}", 
&buffer_abs_path.to_string_lossy()) + } else { + arg.replace("{buffer_path}", "Untitled") + } + })) + .stdin(smol::process::Stdio::piped()) + .stdout(smol::process::Stdio::piped()) + .stderr(smol::process::Stdio::piped()) + .spawn()?; + + let stdin = child + .stdin + .as_mut() + .ok_or_else(|| anyhow!("failed to acquire stdin"))?; + let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?; + for chunk in text.chunks() { + stdin.write_all(chunk.as_bytes()).await?; + } + stdin.flush().await?; + + let output = child.output().await?; + if !output.status.success() { + return Err(anyhow!( + "command failed with exit code {:?}:\nstdout: {}\nstderr: {}", + output.status.code(), + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + )); + } + + let stdout = String::from_utf8(output.stdout)?; + Ok(Some( + buffer + .update(cx, |buffer, cx| buffer.diff(stdout, cx))? + .await, + )) + } } pub struct RemoteLspStore { @@ -221,8 +729,6 @@ pub enum LspStoreEvent { edits: Vec<(lsp::Range, Snippet)>, most_recent_edit: clock::Lamport, }, - StartFormattingLocalBuffer(BufferId), - FinishFormattingLocalBuffer(BufferId), } #[derive(Clone, Debug, Serialize)] @@ -251,6 +757,7 @@ impl LspStore { client.add_model_message_handler(Self::handle_start_language_server); client.add_model_message_handler(Self::handle_update_language_server); client.add_model_message_handler(Self::handle_update_diagnostic_summary); + client.add_model_request_handler(Self::handle_format_buffers); client.add_model_request_handler(Self::handle_resolve_completion_documentation); client.add_model_request_handler(Self::handle_apply_code_action); client.add_model_request_handler(Self::handle_inlay_hints); @@ -366,6 +873,8 @@ impl LspStore { language_server_watched_paths: Default::default(), language_server_watcher_registrations: Default::default(), current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), + buffers_being_formatted: Default::default(), + 
last_formatting_failure: None, prettier_store, environment, http_client, @@ -387,6 +896,7 @@ impl LspStore { diagnostic_summaries: Default::default(), diagnostics: Default::default(), active_entry: None, + _maintain_workspace_config: Self::maintain_workspace_config(cx), _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), } @@ -1276,7 +1786,7 @@ impl LspStore { } fn apply_on_type_formatting( - &self, + &mut self, buffer: Model, position: Anchor, trigger: String, @@ -1298,25 +1808,18 @@ impl LspStore { .map(language::proto::deserialize_transaction) .transpose() }) - } else { + } else if let Some(local) = self.as_local_mut() { + let buffer_id = buffer.read(cx).remote_id(); + local.buffers_being_formatted.insert(buffer_id); cx.spawn(move |this, mut cx| async move { - // Do not allow multiple concurrent formatting requests for the - // same buffer. - this.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::StartFormattingLocalBuffer( - buffer.read(cx).remote_id(), - )); - })?; - let _cleanup = defer({ let this = this.clone(); let mut cx = cx.clone(); - let closure_buffer = buffer.clone(); move || { - this.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::FinishFormattingLocalBuffer( - closure_buffer.read(cx).remote_id(), - )) + this.update(&mut cx, |this, _| { + if let Some(local) = this.as_local_mut() { + local.buffers_being_formatted.remove(&buffer_id); + } }) .ok(); } @@ -1333,6 +1836,8 @@ impl LspStore { })? 
.await }) + } else { + Task::ready(Err(anyhow!("No upstream client or local language server"))) } } @@ -4708,6 +5213,110 @@ impl LspStore { .map(language::proto::serialize_transaction), }) } + pub fn last_formatting_failure(&self) -> Option<&str> { + self.as_local() + .and_then(|local| local.last_formatting_failure.as_deref()) + } + + pub fn format( + &mut self, + buffers: HashSet>, + push_to_history: bool, + trigger: FormatTrigger, + cx: &mut ModelContext, + ) -> Task> { + if let Some(_) = self.as_local() { + let buffers_with_paths = buffers + .into_iter() + .map(|buffer_handle| { + let buffer = buffer_handle.read(cx); + let buffer_abs_path = File::from_dyn(buffer.file()) + .and_then(|file| file.as_local().map(|f| f.abs_path(cx))); + (buffer_handle, buffer_abs_path) + }) + .collect::>(); + + cx.spawn(move |lsp_store, mut cx| async move { + let result = LocalLspStore::format_locally( + lsp_store.clone(), + buffers_with_paths, + push_to_history, + trigger, + cx.clone(), + ) + .await; + + lsp_store.update(&mut cx, |lsp_store, _| { + let local = lsp_store.as_local_mut().unwrap(); + match &result { + Ok(_) => local.last_formatting_failure = None, + Err(error) => { + local.last_formatting_failure.replace(error.to_string()); + } + } + })?; + + result + }) + } else if let Some((client, project_id)) = self.upstream_client() { + cx.spawn(move |this, mut cx| async move { + let response = client + .request(proto::FormatBuffers { + project_id, + trigger: trigger as i32, + buffer_ids: buffers + .iter() + .map(|buffer| { + buffer.update(&mut cx, |buffer, _| buffer.remote_id().into()) + }) + .collect::>()?, + }) + .await? 
+ .transaction + .ok_or_else(|| anyhow!("missing transaction"))?; + BufferStore::deserialize_project_transaction( + this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, + response, + push_to_history, + cx, + ) + .await + }) + } else { + Task::ready(Ok(ProjectTransaction::default())) + } + } + + async fn handle_format_buffers( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let sender_id = envelope.original_sender_id().unwrap_or_default(); + let format = this.update(&mut cx, |this, cx| { + let mut buffers = HashSet::default(); + for buffer_id in &envelope.payload.buffer_ids { + let buffer_id = BufferId::new(*buffer_id)?; + buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?); + } + let trigger = FormatTrigger::from_proto(envelope.payload.trigger); + Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx)) + })??; + + let project_transaction = format.await?; + let project_transaction = this.update(&mut cx, |this, cx| { + this.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.serialize_project_transaction_for_peer( + project_transaction, + sender_id, + cx, + ) + }) + })?; + Ok(proto::FormatBuffersResponse { + transaction: Some(project_transaction), + }) + } fn language_settings<'a>( &'a self, diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 75d70c1d3f72f1..82bd8464b2e535 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -25,8 +25,8 @@ use smol::stream::StreamExt; use util::{ResultExt, TryFutureExt}; use crate::{ - worktree_store::WorktreeStore, File, FormatOperation, PathChange, ProjectEntryId, Worktree, - WorktreeId, + lsp_store::WorktreeId, worktree_store::WorktreeStore, File, PathChange, ProjectEntryId, + Worktree, }; pub struct PrettierStore { @@ -644,7 +644,7 @@ pub(super) async fn format_with_prettier( prettier_store: &WeakModel, buffer: &Model, cx: &mut AsyncAppContext, -) -> Option> { +) 
-> Option> { let prettier_instance = prettier_store .update(cx, |prettier_store, cx| { prettier_store.prettier_instance_for_buffer(buffer, cx) @@ -671,7 +671,7 @@ pub(super) async fn format_with_prettier( let format_result = prettier .format(buffer, buffer_path, cx) .await - .map(FormatOperation::Prettier) + .map(crate::lsp_store::FormatOperation::Prettier) .with_context(|| format!("{} failed to format buffer", prettier_description)); Some(format_result) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b1347c6d063f25..dc9337674b7eb7 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -31,7 +31,7 @@ pub use environment::ProjectEnvironment; use futures::{ channel::mpsc::{self, UnboundedReceiver}, future::try_join_all, - AsyncWriteExt, StreamExt, + StreamExt, }; use git::{blame::Blame, repository::GitRepository}; @@ -41,17 +41,14 @@ use gpui::{ }; use itertools::Itertools; use language::{ - language_settings::{ - language_settings, FormatOnSave, Formatter, InlayHintKind, LanguageSettings, - SelectedFormatter, - }, + language_settings::InlayHintKind, proto::{ deserialize_anchor, serialize_anchor, serialize_line_ending, serialize_version, split_operations, }, Buffer, BufferEvent, CachedLspAdapter, Capability, CodeLabel, ContextProvider, DiagnosticEntry, - Diff, Documentation, File as _, Language, LanguageRegistry, LanguageServerName, PointUtf16, - ToOffset, ToPointUtf16, Transaction, Unclipped, + Documentation, File as _, Language, LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, + ToPointUtf16, Transaction, Unclipped, }; use lsp::{CompletionContext, DocumentHighlightKind, LanguageServer, LanguageServerId}; use lsp_command::*; @@ -84,7 +81,7 @@ use task::{ }; use terminals::Terminals; use text::{Anchor, BufferId}; -use util::{defer, paths::compare_paths, ResultExt as _}; +use util::{paths::compare_paths, ResultExt as _}; use worktree::{CreatedEntry, Snapshot, Traversal}; use 
worktree_store::{WorktreeStore, WorktreeStoreEvent}; @@ -164,8 +161,6 @@ pub struct Project { search_included_history: SearchHistory, search_excluded_history: SearchHistory, snippets: Model, - last_formatting_failure: Option, - buffers_being_formatted: HashSet, environment: Model, settings_observer: Model, } @@ -477,31 +472,6 @@ impl Hover { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum FormatTrigger { - Save, - Manual, -} - -// Currently, formatting operations are represented differently depending on -// whether they come from a language server or an external command. -#[derive(Debug)] -enum FormatOperation { - Lsp(Vec<(Range, String)>), - External(Diff), - Prettier(Diff), -} - -impl FormatTrigger { - fn from_proto(value: i32) -> FormatTrigger { - match value { - 0 => FormatTrigger::Save, - 1 => FormatTrigger::Manual, - _ => FormatTrigger::Save, - } - } -} - enum EntitySubscription { Project(PendingEntitySubscription), BufferStore(PendingEntitySubscription), @@ -591,7 +561,7 @@ impl Project { client.add_model_message_handler(Self::handle_update_worktree); client.add_model_request_handler(Self::handle_reload_buffers); client.add_model_request_handler(Self::handle_synchronize_buffers); - client.add_model_request_handler(Self::handle_format_buffers); + client.add_model_request_handler(Self::handle_search_project); client.add_model_request_handler(Self::handle_search_candidate_buffers); client.add_model_request_handler(Self::handle_open_buffer_by_id); @@ -695,8 +665,7 @@ impl Project { search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), - last_formatting_failure: None, - buffers_being_formatted: Default::default(), + search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), } @@ -779,8 +748,7 @@ impl Project { search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), - last_formatting_failure: None, - 
buffers_being_formatted: Default::default(), + search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), }; @@ -967,8 +935,6 @@ impl Project { search_excluded_history: Self::new_search_history(), environment: ProjectEnvironment::new(&worktree_store, None, cx), remotely_created_models: Arc::new(Mutex::new(RemotelyCreatedModels::default())), - last_formatting_failure: None, - buffers_being_formatted: Default::default(), }; this.set_role(role, cx); for worktree in worktrees { @@ -2061,12 +2027,6 @@ impl Project { cx.emit(Event::SnippetEdit(*buffer_id, edits.clone())) } } - LspStoreEvent::StartFormattingLocalBuffer(buffer_id) => { - self.buffers_being_formatted.insert(*buffer_id); - } - LspStoreEvent::FinishFormattingLocalBuffer(buffer_id) => { - self.buffers_being_formatted.remove(buffer_id); - } } } @@ -2352,8 +2312,8 @@ impl Project { self.lsp_store.read(cx).language_server_statuses() } - pub fn last_formatting_failure(&self) -> Option<&str> { - self.last_formatting_failure.as_deref() + pub fn last_formatting_failure<'a>(&self, cx: &'a AppContext) -> Option<&'a str> { + self.lsp_store.read(cx).last_formatting_failure() } pub fn update_diagnostics( @@ -2455,558 +2415,12 @@ impl Project { &mut self, buffers: HashSet>, push_to_history: bool, - trigger: FormatTrigger, + trigger: lsp_store::FormatTrigger, cx: &mut ModelContext, ) -> Task> { - if self.is_local_or_ssh() { - let buffers_with_paths = buffers - .into_iter() - .map(|buffer_handle| { - let buffer = buffer_handle.read(cx); - let buffer_abs_path = File::from_dyn(buffer.file()) - .and_then(|file| file.as_local().map(|f| f.abs_path(cx))); - (buffer_handle, buffer_abs_path) - }) - .collect::>(); - - cx.spawn(move |project, mut cx| async move { - let result = Self::format_locally( - project.clone(), - buffers_with_paths, - push_to_history, - trigger, - cx.clone(), - ) - .await; - - project.update(&mut cx, |project, _| match &result { - Ok(_) => 
project.last_formatting_failure = None, - Err(error) => { - project.last_formatting_failure.replace(error.to_string()); - } - })?; - - result - }) - } else { - let remote_id = self.remote_id(); - let client = self.client.clone(); - cx.spawn(move |this, mut cx| async move { - if let Some(project_id) = remote_id { - let response = client - .request(proto::FormatBuffers { - project_id, - trigger: trigger as i32, - buffer_ids: buffers - .iter() - .map(|buffer| { - buffer.update(&mut cx, |buffer, _| buffer.remote_id().into()) - }) - .collect::>()?, - }) - .await? - .transaction - .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, - response, - push_to_history, - cx, - ) - .await - } else { - Ok(ProjectTransaction::default()) - } - }) - } - } - - async fn format_locally( - project: WeakModel, - mut buffers_with_paths: Vec<(Model, Option)>, - push_to_history: bool, - trigger: FormatTrigger, - mut cx: AsyncAppContext, - ) -> anyhow::Result { - // Do not allow multiple concurrent formatting requests for the - // same buffer. 
- let lsp_store = project.update(&mut cx, |this, cx| { - buffers_with_paths.retain(|(buffer, _)| { - this.buffers_being_formatted - .insert(buffer.read(cx).remote_id()) - }); - this.lsp_store.downgrade() - })?; - - let _cleanup = defer({ - let this = project.clone(); - let mut cx = cx.clone(); - let buffers = &buffers_with_paths; - move || { - this.update(&mut cx, |this, cx| { - for (buffer, _) in buffers { - this.buffers_being_formatted - .remove(&buffer.read(cx).remote_id()); - } - }) - .ok(); - } - }); - - let mut project_transaction = ProjectTransaction::default(); - for (buffer, buffer_abs_path) in &buffers_with_paths { - let (primary_adapter_and_server, adapters_and_servers) = - project.update(&mut cx, |project, cx| { - let buffer = buffer.read(cx); - - let adapters_and_servers = project - .language_servers_for_buffer(buffer, cx) - .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())) - .collect::>(); - - let primary_adapter = project - .lsp_store - .read(cx) - .primary_language_server_for_buffer(buffer, cx) - .map(|(adapter, lsp)| (adapter.clone(), lsp.clone())); - - (primary_adapter, adapters_and_servers) - })?; - - let settings = buffer.update(&mut cx, |buffer, cx| { - language_settings(buffer.language(), buffer.file(), cx).clone() - })?; - - let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save; - let ensure_final_newline = settings.ensure_final_newline_on_save; - - // First, format buffer's whitespace according to the settings. - let trailing_whitespace_diff = if remove_trailing_whitespace { - Some( - buffer - .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))? 
- .await, - ) - } else { - None - }; - let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| { - buffer.finalize_last_transaction(); - buffer.start_transaction(); - if let Some(diff) = trailing_whitespace_diff { - buffer.apply_diff(diff, cx); - } - if ensure_final_newline { - buffer.ensure_final_newline(cx); - } - buffer.end_transaction(cx) - })?; - - // Apply the `code_actions_on_format` before we run the formatter. - let code_actions = deserialize_code_actions(&settings.code_actions_on_format); - #[allow(clippy::nonminimal_bool)] - if !code_actions.is_empty() - && !(trigger == FormatTrigger::Save && settings.format_on_save == FormatOnSave::Off) - { - LspStore::execute_code_actions_on_servers( - &lsp_store, - &adapters_and_servers, - code_actions, - buffer, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await?; - } - - // Apply language-specific formatting using either the primary language server - // or external command. - // Except for code actions, which are applied with all connected language servers. 
- let primary_language_server = - primary_adapter_and_server.map(|(_adapter, server)| server.clone()); - let server_and_buffer = primary_language_server - .as_ref() - .zip(buffer_abs_path.as_ref()); - - let prettier_settings = buffer.read_with(&cx, |buffer, cx| { - language_settings(buffer.language(), buffer.file(), cx) - .prettier - .clone() - })?; - - let mut format_operations: Vec = vec![]; - { - match trigger { - FormatTrigger::Save => { - match &settings.format_on_save { - FormatOnSave::Off => { - // nothing - } - FormatOnSave::On => { - match &settings.formatter { - SelectedFormatter::Auto => { - // do the auto-format: prefer prettier, fallback to primary language server - let diff = { - if prettier_settings.allowed { - Self::perform_format( - &Formatter::Prettier, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - } else { - Self::perform_format( - &Formatter::LanguageServer { name: None }, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - } - } - .log_err() - .flatten(); - if let Some(op) = diff { - format_operations.push(op); - } - } - SelectedFormatter::List(formatters) => { - for formatter in formatters.as_ref() { - let diff = Self::perform_format( - formatter, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - .log_err() - .flatten(); - if let Some(op) = diff { - format_operations.push(op); - } - - // format with formatter - } - } - } - } - FormatOnSave::List(formatters) => { - for formatter in formatters.as_ref() { - let diff = Self::perform_format( - formatter, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - 
push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - .log_err() - .flatten(); - if let Some(op) = diff { - format_operations.push(op); - } - } - } - } - } - FormatTrigger::Manual => { - match &settings.formatter { - SelectedFormatter::Auto => { - // do the auto-format: prefer prettier, fallback to primary language server - let diff = { - if prettier_settings.allowed { - Self::perform_format( - &Formatter::Prettier, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - } else { - Self::perform_format( - &Formatter::LanguageServer { name: None }, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - } - } - .log_err() - .flatten(); - - if let Some(op) = diff { - format_operations.push(op) - } - } - SelectedFormatter::List(formatters) => { - for formatter in formatters.as_ref() { - // format with formatter - let diff = Self::perform_format( - formatter, - server_and_buffer, - project.clone(), - buffer, - buffer_abs_path, - &settings, - &adapters_and_servers, - push_to_history, - &mut project_transaction, - &mut cx, - ) - .await - .log_err() - .flatten(); - if let Some(op) = diff { - format_operations.push(op); - } - } - } - } - } - } - } - - buffer.update(&mut cx, |b, cx| { - // If the buffer had its whitespace formatted and was edited while the language-specific - // formatting was being computed, avoid applying the language-specific formatting, because - // it can't be grouped with the whitespace formatting in the undo history. 
- if let Some(transaction_id) = whitespace_transaction_id { - if b.peek_undo_stack() - .map_or(true, |e| e.transaction_id() != transaction_id) - { - format_operations.clear(); - } - } - - // Apply any language-specific formatting, and group the two formatting operations - // in the buffer's undo history. - for operation in format_operations { - match operation { - FormatOperation::Lsp(edits) => { - b.edit(edits, None, cx); - } - FormatOperation::External(diff) => { - b.apply_diff(diff, cx); - } - FormatOperation::Prettier(diff) => { - b.apply_diff(diff, cx); - } - } - - if let Some(transaction_id) = whitespace_transaction_id { - b.group_until_transaction(transaction_id); - } else if let Some(transaction) = project_transaction.0.get(buffer) { - b.group_until_transaction(transaction.id) - } - } - - if let Some(transaction) = b.finalize_last_transaction().cloned() { - if !push_to_history { - b.forget_transaction(transaction.id); - } - project_transaction.0.insert(buffer.clone(), transaction); - } - })?; - } - - Ok(project_transaction) - } - - #[allow(clippy::too_many_arguments)] - async fn perform_format( - formatter: &Formatter, - primary_server_and_buffer: Option<(&Arc, &PathBuf)>, - project: WeakModel, - buffer: &Model, - buffer_abs_path: &Option, - settings: &LanguageSettings, - adapters_and_servers: &[(Arc, Arc)], - push_to_history: bool, - transaction: &mut ProjectTransaction, - cx: &mut AsyncAppContext, - ) -> Result, anyhow::Error> { - let result = match formatter { - Formatter::LanguageServer { name } => { - if let Some((language_server, buffer_abs_path)) = primary_server_and_buffer { - let language_server = if let Some(name) = name { - adapters_and_servers - .iter() - .find_map(|(adapter, server)| { - adapter.name.0.as_ref().eq(name.as_str()).then_some(server) - }) - .unwrap_or(language_server) - } else { - language_server - }; - - let lsp_store = project.update(cx, |p, _| p.lsp_store.downgrade())?; - Some(FormatOperation::Lsp( - LspStore::format_via_lsp( - 
&lsp_store, - buffer, - buffer_abs_path, - language_server, - settings, - cx, - ) - .await - .context("failed to format via language server")?, - )) - } else { - None - } - } - Formatter::Prettier => { - let prettier = project.update(cx, |project, cx| { - project - .lsp_store - .read(cx) - .prettier_store() - .unwrap() - .downgrade() - })?; - prettier_store::format_with_prettier(&prettier, buffer, cx) - .await - .transpose() - .ok() - .flatten() - } - Formatter::External { command, arguments } => { - let buffer_abs_path = buffer_abs_path.as_ref().map(|path| path.as_path()); - Self::format_via_external_command(buffer, buffer_abs_path, command, arguments, cx) - .await - .context(format!( - "failed to format via external command {:?}", - command - ))? - .map(FormatOperation::External) - } - Formatter::CodeActions(code_actions) => { - let code_actions = deserialize_code_actions(code_actions); - let lsp_store = project.update(cx, |p, _| p.lsp_store.downgrade())?; - if !code_actions.is_empty() { - LspStore::execute_code_actions_on_servers( - &lsp_store, - adapters_and_servers, - code_actions, - buffer, - push_to_history, - transaction, - cx, - ) - .await?; - } - None - } - }; - anyhow::Ok(result) - } - - async fn format_via_external_command( - buffer: &Model, - buffer_abs_path: Option<&Path>, - command: &str, - arguments: &[String], - cx: &mut AsyncAppContext, - ) -> Result> { - let working_dir_path = buffer.update(cx, |buffer, cx| { - let file = File::from_dyn(buffer.file())?; - let worktree = file.worktree.read(cx); - let mut worktree_path = worktree.abs_path().to_path_buf(); - if worktree.root_entry()?.is_file() { - worktree_path.pop(); - } - Some(worktree_path) - })?; - - let mut child = smol::process::Command::new(command); - #[cfg(target_os = "windows")] - { - use smol::process::windows::CommandExt; - child.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - } - - if let Some(working_dir_path) = working_dir_path { - 
child.current_dir(working_dir_path); - } - - let mut child = child - .args(arguments.iter().map(|arg| { - if let Some(buffer_abs_path) = buffer_abs_path { - arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy()) - } else { - arg.replace("{buffer_path}", "Untitled") - } - })) - .stdin(smol::process::Stdio::piped()) - .stdout(smol::process::Stdio::piped()) - .stderr(smol::process::Stdio::piped()) - .spawn()?; - - let stdin = child - .stdin - .as_mut() - .ok_or_else(|| anyhow!("failed to acquire stdin"))?; - let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?; - for chunk in text.chunks() { - stdin.write_all(chunk.as_bytes()).await?; - } - stdin.flush().await?; - - let output = child.output().await?; - if !output.status.success() { - return Err(anyhow!( - "command failed with exit code {:?}:\nstdout: {}\nstderr: {}", - output.status.code(), - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr), - )); - } - - let stdout = String::from_utf8(output.stdout)?; - Ok(Some( - buffer - .update(cx, |buffer, cx| buffer.diff(stdout, cx))? 
- .await, - )) + self.lsp_store.update(cx, |lsp_store, cx| { + lsp_store.format(buffers, push_to_history, trigger, cx) + }) } #[inline(never)] @@ -4210,31 +3624,6 @@ impl Project { Ok(response) } - async fn handle_format_buffers( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let sender_id = envelope.original_sender_id()?; - let format = this.update(&mut cx, |this, cx| { - let mut buffers = HashSet::default(); - for buffer_id in &envelope.payload.buffer_ids { - let buffer_id = BufferId::new(*buffer_id)?; - buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?); - } - let trigger = FormatTrigger::from_proto(envelope.payload.trigger); - Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx)) - })??; - - let project_transaction = format.await?; - let project_transaction = this.update(&mut cx, |this, cx| { - this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx) - })?; - Ok(proto::FormatBuffersResponse { - transaction: Some(project_transaction), - }) - } - async fn handle_task_context_for_location( project: Model, envelope: TypedEnvelope, diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index a7d2e6766c2330..9e58caa2442439 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -4,7 +4,7 @@ use futures::{future, StreamExt}; use gpui::{AppContext, SemanticVersion, UpdateGlobal}; use http_client::Url; use language::{ - language_settings::{AllLanguageSettings, LanguageSettingsContent}, + language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent}, tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint, }; From 3ba071b993099cdd9365f2223dd41fabf26df266 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Sep 2024 15:28:04 -0600 Subject: [PATCH 282/762] Allow using 
system node (#18172) Release Notes: - (Potentially breaking change) Zed will now use the node installed on your $PATH (if it is more recent than v18) instead of downloading its own. You can disable the new behavior with `{"node": {"disable_path_lookup": true}}` in your settings. We do not yet use system/project-local node_modules. --------- Co-authored-by: Mikayla --- Cargo.lock | 4 + assets/settings/default.json | 15 + crates/collab/src/tests/test_server.rs | 6 +- crates/copilot/src/copilot.rs | 12 +- crates/evals/src/eval.rs | 4 +- crates/extension/src/extension_store.rs | 4 +- crates/extension/src/extension_store_test.rs | 6 +- crates/extension/src/wasm_host.rs | 4 +- crates/headless/src/headless.rs | 2 +- crates/http_client/src/http_client.rs | 29 + crates/language/src/language.rs | 1 + crates/languages/src/css.rs | 10 +- crates/languages/src/json.rs | 10 +- crates/languages/src/lib.rs | 6 +- crates/languages/src/python.rs | 10 +- crates/languages/src/tailwind.rs | 10 +- crates/languages/src/typescript.rs | 24 +- crates/languages/src/vtsls.rs | 10 +- crates/languages/src/yaml.rs | 10 +- crates/markdown/examples/markdown.rs | 4 +- crates/markdown/examples/markdown_as_child.rs | 4 +- crates/node_runtime/Cargo.toml | 2 + crates/node_runtime/src/node_runtime.rs | 646 +++++++++++------- crates/prettier/src/prettier.rs | 4 +- crates/project/src/lsp_store.rs | 33 +- crates/project/src/prettier_store.rs | 18 +- crates/project/src/project.rs | 12 +- crates/project/src/project_settings.rs | 15 + crates/remote_server/src/headless_project.rs | 4 +- .../remote_server/src/remote_editing_tests.rs | 4 +- crates/workspace/src/workspace.rs | 10 +- crates/zed/Cargo.toml | 2 + crates/zed/src/main.rs | 32 +- crates/zed/src/zed.rs | 2 +- 34 files changed, 596 insertions(+), 373 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e345736295613b..894dd00f6d7f79 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7190,6 +7190,7 @@ dependencies = [ "async-std", "async-tar", "async-trait", 
+ "async-watch", "async_zip", "futures 0.3.30", "http_client", @@ -7202,6 +7203,7 @@ dependencies = [ "tempfile", "util", "walkdir", + "which 6.0.3", "windows 0.58.0", ] @@ -14393,6 +14395,7 @@ dependencies = [ "ashpd", "assets", "assistant", + "async-watch", "audio", "auto_update", "backtrace", @@ -14466,6 +14469,7 @@ dependencies = [ "session", "settings", "settings_ui", + "shellexpand 2.1.2", "simplelog", "smol", "snippet_provider", diff --git a/assets/settings/default.json b/assets/settings/default.json index e04ab90f217cdf..3e8d3c8c70dd7d 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -771,6 +771,21 @@ "pyrightconfig.json" ] }, + /// By default use a recent system version of node, or install our own. + /// You can override this to use a version of node that is not in $PATH with: + /// { + /// "node": { + /// "node_path": "/path/to/node" + /// "npm_path": "/path/to/npm" (defaults to node_path/../npm) + /// } + /// } + /// or to ensure Zed always downloads and installs an isolated version of node: + /// { + /// "node": { + /// "disable_path_lookup": true + /// } + /// NOTE: changing this setting currently requires restarting Zed. + "node": {}, // The extensions that Zed should automatically install on startup. 
// // If you don't want any of these extensions, add this field to your settings diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 1421e4c7f7aed7..6f07d76b0b26b0 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -21,7 +21,7 @@ use git::GitHostingProviderRegistry; use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext}; use http_client::FakeHttpClient; use language::LanguageRegistry; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use notifications::NotificationStore; use parking_lot::Mutex; use project::{Project, WorktreeId}; @@ -278,7 +278,7 @@ impl TestServer { languages: language_registry, fs: fs.clone(), build_window_options: |_, _| Default::default(), - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), session, }); @@ -408,7 +408,7 @@ impl TestServer { languages: language_registry, fs: fs.clone(), build_window_options: |_, _| Default::default(), - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), session, }); diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index cdbe65ba1dcca1..a1fd7a9bb96683 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -57,7 +57,7 @@ pub fn init( new_server_id: LanguageServerId, fs: Arc, http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, cx: &mut AppContext, ) { copilot_chat::init(fs, http.clone(), cx); @@ -302,7 +302,7 @@ pub struct Completion { pub struct Copilot { http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, server: CopilotServer, buffers: HashSet>, server_id: LanguageServerId, @@ -334,7 +334,7 @@ impl Copilot { fn start( new_server_id: LanguageServerId, http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, cx: &mut ModelContext, ) -> Self { let mut this = Self { @@ -392,7 +392,7 @@ impl Copilot { #[cfg(any(test, feature = 
"test-support"))] pub fn fake(cx: &mut gpui::TestAppContext) -> (Model, lsp::FakeLanguageServer) { use lsp::FakeLanguageServer; - use node_runtime::FakeNodeRuntime; + use node_runtime::NodeRuntime; let (server, fake_server) = FakeLanguageServer::new( LanguageServerId(0), @@ -406,7 +406,7 @@ impl Copilot { cx.to_async(), ); let http = http_client::FakeHttpClient::create(|_| async { unreachable!() }); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); let this = cx.new_model(|cx| Self { server_id: LanguageServerId(0), http: http.clone(), @@ -425,7 +425,7 @@ impl Copilot { async fn start_language_server( new_server_id: LanguageServerId, http: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, this: WeakModel, mut cx: AsyncAppContext, ) { diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index e2c8b42644a31d..899d8210537111 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -9,7 +9,7 @@ use git::GitHostingProviderRegistry; use gpui::{AsyncAppContext, BackgroundExecutor, Context, Model}; use http_client::{HttpClient, Method}; use language::LanguageRegistry; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use open_ai::OpenAiEmbeddingModel; use project::Project; use semantic_index::{ @@ -292,7 +292,7 @@ async fn run_evaluation( let user_store = cx .new_model(|cx| UserStore::new(client.clone(), cx)) .unwrap(); - let node_runtime = Arc::new(FakeNodeRuntime {}); + let node_runtime = NodeRuntime::unavailable(); let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json"); let evaluations: Vec = serde_json::from_slice(&evaluations).unwrap(); diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 8dbd618a25784b..5f9fbffb11b2ec 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -177,7 +177,7 @@ actions!(zed, [ReloadExtensions]); pub fn init( fs: Arc, 
client: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, language_registry: Arc, theme_registry: Arc, cx: &mut AppContext, @@ -228,7 +228,7 @@ impl ExtensionStore { http_client: Arc, builder_client: Arc, telemetry: Option>, - node_runtime: Arc, + node_runtime: NodeRuntime, language_registry: Arc, theme_registry: Arc, slash_command_registry: Arc, diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 4bdafaa32c2aff..126e6b2cfbdad0 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -15,7 +15,7 @@ use http_client::{FakeHttpClient, Response}; use indexed_docs::IndexedDocsRegistry; use isahc_http_client::IsahcHttpClient; use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use parking_lot::Mutex; use project::{Project, DEFAULT_COMPLETION_CONTEXT}; use release_channel::AppVersion; @@ -264,7 +264,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { let slash_command_registry = SlashCommandRegistry::new(); let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor())); let snippet_registry = Arc::new(SnippetRegistry::new()); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); let store = cx.new_model(|cx| { ExtensionStore::new( @@ -490,7 +490,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let slash_command_registry = SlashCommandRegistry::new(); let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor())); let snippet_registry = Arc::new(SnippetRegistry::new()); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); let mut status_updates = language_registry.language_server_binary_statuses(); diff --git a/crates/extension/src/wasm_host.rs b/crates/extension/src/wasm_host.rs index 
039f2d923b0d2f..b3fd13a5bada34 100644 --- a/crates/extension/src/wasm_host.rs +++ b/crates/extension/src/wasm_host.rs @@ -33,7 +33,7 @@ pub(crate) struct WasmHost { engine: Engine, release_channel: ReleaseChannel, http_client: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, pub(crate) language_registry: Arc, fs: Arc, pub(crate) work_dir: PathBuf, @@ -80,7 +80,7 @@ impl WasmHost { pub fn new( fs: Arc, http_client: Arc, - node_runtime: Arc, + node_runtime: NodeRuntime, language_registry: Arc, work_dir: PathBuf, cx: &mut AppContext, diff --git a/crates/headless/src/headless.rs b/crates/headless/src/headless.rs index a5504500da0177..1405577643606a 100644 --- a/crates/headless/src/headless.rs +++ b/crates/headless/src/headless.rs @@ -25,7 +25,7 @@ pub struct DevServer { } pub struct AppState { - pub node_runtime: Arc, + pub node_runtime: NodeRuntime, pub user_store: Model, pub languages: Arc, pub fs: Arc, diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index d78b2dd23c7f7f..c0630151519c5a 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -264,6 +264,35 @@ pub fn read_proxy_from_env() -> Option { None } +pub struct BlockedHttpClient; + +impl HttpClient for BlockedHttpClient { + fn send( + &self, + _req: Request, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + Box::pin(async { + Err(std::io::Error::new( + std::io::ErrorKind::PermissionDenied, + "BlockedHttpClient disallowed request", + ) + .into()) + }) + } + + fn proxy(&self) -> Option<&Uri> { + None + } + + fn send_with_redirect_policy( + &self, + req: Request, + _: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.send(req) + } +} + #[cfg(feature = "test-support")] type FakeHttpHandler = Box< dyn Fn(Request) -> BoxFuture<'static, Result, anyhow::Error>> diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 29a7ac1860b0c5..166d846f86e8bc 100644 --- 
a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -564,6 +564,7 @@ async fn try_fetch_server_binary let name = adapter.name(); log::info!("fetching latest version of language server {:?}", name.0); delegate.update_status(name.clone(), LanguageServerBinaryStatus::CheckingForUpdate); + let latest_version = adapter .fetch_latest_server_version(delegate.as_ref()) .await?; diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index cf259d69d321a8..7b7e9ae77f06fc 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -22,11 +22,11 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct CssLspAdapter { - node: Arc, + node: NodeRuntime, } impl CssLspAdapter { - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { CssLspAdapter { node } } } @@ -81,14 +81,14 @@ impl LspAdapter for CssLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn initialization_options( @@ -103,7 +103,7 @@ impl LspAdapter for CssLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let mut last_version_dir = None; diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 6b5f74c2634b45..44cc68387676ee 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -59,13 +59,13 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct JsonLspAdapter { - node: Arc, + node: NodeRuntime, languages: Arc, workspace_config: OnceLock, } impl JsonLspAdapter { - pub fn new(node: Arc, languages: Arc) -> Self { + 
pub fn new(node: NodeRuntime, languages: Arc) -> Self { Self { node, languages, @@ -183,14 +183,14 @@ impl LspAdapter for JsonLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn initialization_options( @@ -226,7 +226,7 @@ impl LspAdapter for JsonLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let mut last_version_dir = None; diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 0a3fc71d08961d..7435ddb13196dd 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -30,11 +30,7 @@ mod yaml; #[exclude = "*.rs"] struct LanguageDir; -pub fn init( - languages: Arc, - node_runtime: Arc, - cx: &mut AppContext, -) { +pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mut AppContext) { languages.register_native_grammars([ ("bash", tree_sitter_bash::LANGUAGE), ("c", tree_sitter_c::LANGUAGE), diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 0dce8fb6617616..75f124489c3820 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -26,13 +26,13 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct PythonLspAdapter { - node: Arc, + node: NodeRuntime, } impl PythonLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("pyright"); - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { PythonLspAdapter { node } } } @@ -94,14 +94,14 @@ impl LspAdapter for PythonLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - 
get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn process_completions(&self, items: &mut [lsp::CompletionItem]) { @@ -198,7 +198,7 @@ impl LspAdapter for PythonLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { let server_path = container_dir.join(SERVER_PATH); if server_path.exists() { diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index e3e17a8fa72eb4..62d967d6a4a267 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -28,14 +28,14 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct TailwindLspAdapter { - node: Arc, + node: NodeRuntime, } impl TailwindLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("tailwindcss-language-server"); - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { TailwindLspAdapter { node } } } @@ -122,14 +122,14 @@ impl LspAdapter for TailwindLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn initialization_options( @@ -198,7 +198,7 @@ impl LspAdapter for TailwindLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let mut last_version_dir = None; diff --git 
a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index b09216c9703698..25a97c8014d0a1 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -65,7 +65,7 @@ fn eslint_server_binary_arguments(server_path: &Path) -> Vec { } pub struct TypeScriptLspAdapter { - node: Arc, + node: NodeRuntime, } impl TypeScriptLspAdapter { @@ -73,7 +73,7 @@ impl TypeScriptLspAdapter { const NEW_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.mjs"; const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("typescript-language-server"); - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { TypeScriptLspAdapter { node } } async fn tsdk_path(adapter: &Arc) -> &'static str { @@ -161,14 +161,14 @@ impl LspAdapter for TypeScriptLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_ts_server_binary(container_dir, &*self.node).await + get_cached_ts_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_ts_server_binary(container_dir, &*self.node).await + get_cached_ts_server_binary(container_dir, &self.node).await } fn code_action_kinds(&self) -> Option> { @@ -264,7 +264,7 @@ impl LspAdapter for TypeScriptLspAdapter { async fn get_cached_ts_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let old_server_path = container_dir.join(TypeScriptLspAdapter::OLD_SERVER_PATH); @@ -293,7 +293,7 @@ async fn get_cached_ts_server_binary( } pub struct EsLintLspAdapter { - node: Arc, + node: NodeRuntime, } impl EsLintLspAdapter { @@ -310,7 +310,7 @@ impl EsLintLspAdapter { const FLAT_CONFIG_FILE_NAMES: &'static [&'static str] = &["eslint.config.js", "eslint.config.mjs", "eslint.config.cjs"]; - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { EsLintLspAdapter { node } } } @@ 
-476,11 +476,11 @@ impl LspAdapter for EsLintLspAdapter { } self.node - .run_npm_subcommand(Some(&repo_root), "install", &[]) + .run_npm_subcommand(&repo_root, "install", &[]) .await?; self.node - .run_npm_subcommand(Some(&repo_root), "run-script", &["compile"]) + .run_npm_subcommand(&repo_root, "run-script", &["compile"]) .await?; } @@ -496,20 +496,20 @@ impl LspAdapter for EsLintLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_eslint_server_binary(container_dir, &*self.node).await + get_cached_eslint_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_eslint_server_binary(container_dir, &*self.node).await + get_cached_eslint_server_binary(container_dir, &self.node).await } } async fn get_cached_eslint_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { // This is unfortunate but we don't know what the version is to build a path directly diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 5ec31213840bb1..3c1cf0fcbe151b 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -20,13 +20,13 @@ fn typescript_server_binary_arguments(server_path: &Path) -> Vec { } pub struct VtslsLspAdapter { - node: Arc, + node: NodeRuntime, } impl VtslsLspAdapter { const SERVER_PATH: &'static str = "node_modules/@vtsls/language-server/bin/vtsls.js"; - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { VtslsLspAdapter { node } } async fn tsdk_path(adapter: &Arc) -> &'static str { @@ -154,14 +154,14 @@ impl LspAdapter for VtslsLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_ts_server_binary(container_dir, &*self.node).await + get_cached_ts_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - 
get_cached_ts_server_binary(container_dir, &*self.node).await + get_cached_ts_server_binary(container_dir, &self.node).await } fn code_action_kinds(&self) -> Option> { @@ -298,7 +298,7 @@ impl LspAdapter for VtslsLspAdapter { async fn get_cached_ts_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let server_path = container_dir.join(VtslsLspAdapter::SERVER_PATH); diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 583961f4b1c0e9..32ca73168ab2d2 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -26,12 +26,12 @@ fn server_binary_arguments(server_path: &Path) -> Vec { } pub struct YamlLspAdapter { - node: Arc, + node: NodeRuntime, } impl YamlLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("yaml-language-server"); - pub fn new(node: Arc) -> Self { + pub fn new(node: NodeRuntime) -> Self { YamlLspAdapter { node } } } @@ -117,14 +117,14 @@ impl LspAdapter for YamlLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_server_binary(container_dir, &*self.node).await + get_cached_server_binary(container_dir, &self.node).await } async fn workspace_configuration( @@ -157,7 +157,7 @@ impl LspAdapter for YamlLspAdapter { async fn get_cached_server_binary( container_dir: PathBuf, - node: &dyn NodeRuntime, + node: &NodeRuntime, ) -> Option { maybe!(async { let mut last_version_dir = None; diff --git a/crates/markdown/examples/markdown.rs b/crates/markdown/examples/markdown.rs index c2f3ab8158609a..0514ebcf4e1e57 100644 --- a/crates/markdown/examples/markdown.rs +++ b/crates/markdown/examples/markdown.rs @@ -2,7 +2,7 @@ use assets::Assets; use gpui::{prelude::*, rgb, App, KeyBinding, 
StyleRefinement, View, WindowOptions}; use language::{language_settings::AllLanguageSettings, LanguageRegistry}; use markdown::{Markdown, MarkdownStyle}; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use settings::SettingsStore; use std::sync::Arc; use theme::LoadThemes; @@ -102,7 +102,7 @@ pub fn main() { }); cx.bind_keys([KeyBinding::new("cmd-c", markdown::Copy, None)]); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); theme::init(LoadThemes::JustBase, cx); let language_registry = LanguageRegistry::new(cx.background_executor().clone()); diff --git a/crates/markdown/examples/markdown_as_child.rs b/crates/markdown/examples/markdown_as_child.rs index 829e69436a6bea..3700e64364dfd8 100644 --- a/crates/markdown/examples/markdown_as_child.rs +++ b/crates/markdown/examples/markdown_as_child.rs @@ -2,7 +2,7 @@ use assets::Assets; use gpui::*; use language::{language_settings::AllLanguageSettings, LanguageRegistry}; use markdown::{Markdown, MarkdownStyle}; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use settings::SettingsStore; use std::sync::Arc; use theme::LoadThemes; @@ -28,7 +28,7 @@ pub fn main() { }); cx.bind_keys([KeyBinding::new("cmd-c", markdown::Copy, None)]); - let node_runtime = FakeNodeRuntime::new(); + let node_runtime = NodeRuntime::unavailable(); let language_registry = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); languages::init(language_registry.clone(), node_runtime, cx); theme::init(LoadThemes::JustBase, cx); diff --git a/crates/node_runtime/Cargo.toml b/crates/node_runtime/Cargo.toml index b7aee583360cba..d852b7ebdf9aac 100644 --- a/crates/node_runtime/Cargo.toml +++ b/crates/node_runtime/Cargo.toml @@ -18,6 +18,7 @@ test-support = ["tempfile"] [dependencies] anyhow.workspace = true async-compression.workspace = true +async-watch.workspace = true async-tar.workspace = true async-trait.workspace = true async_zip.workspace = true @@ -32,6 
+33,7 @@ smol.workspace = true tempfile = { workspace = true, optional = true } util.workspace = true walkdir = "2.5.0" +which.workspace = true [target.'cfg(windows)'.dependencies] async-std = { version = "1.12.0", features = ["unstable"] } diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 4aa65ab6db8049..72c74ce7cf9830 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -5,7 +5,7 @@ pub use archive::extract_zip; use async_compression::futures::bufread::GzipDecoder; use async_tar::Archive; use futures::AsyncReadExt; -use http_client::HttpClient; +use http_client::{HttpClient, Uri}; use semver::Version; use serde::Deserialize; use smol::io::BufReader; @@ -23,60 +23,166 @@ use util::ResultExt; #[cfg(windows)] use smol::process::windows::CommandExt; -const VERSION: &str = "v22.5.1"; +#[derive(Clone, Debug, Default, Eq, PartialEq)] +pub struct NodeBinaryOptions { + pub allow_path_lookup: bool, + pub allow_binary_download: bool, + pub use_paths: Option<(PathBuf, PathBuf)>, +} -#[cfg(not(windows))] -const NODE_PATH: &str = "bin/node"; -#[cfg(windows)] -const NODE_PATH: &str = "node.exe"; +#[derive(Clone)] +pub struct NodeRuntime(Arc>); -#[cfg(not(windows))] -const NPM_PATH: &str = "bin/npm"; -#[cfg(windows)] -const NPM_PATH: &str = "node_modules/npm/bin/npm-cli.js"; - -enum ArchiveType { - TarGz, - Zip, +struct NodeRuntimeState { + http: Arc, + instance: Option>, + last_options: Option, + options: async_watch::Receiver>, } -#[derive(Debug, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct NpmInfo { - #[serde(default)] - dist_tags: NpmInfoDistTags, - versions: Vec, -} +impl NodeRuntime { + pub fn new( + http: Arc, + options: async_watch::Receiver>, + ) -> Self { + NodeRuntime(Arc::new(Mutex::new(NodeRuntimeState { + http, + instance: None, + last_options: None, + options, + }))) + } -#[derive(Debug, Deserialize, Default)] -pub struct NpmInfoDistTags { - 
latest: Option, -} + pub fn unavailable() -> Self { + NodeRuntime(Arc::new(Mutex::new(NodeRuntimeState { + http: Arc::new(http_client::BlockedHttpClient), + instance: None, + last_options: None, + options: async_watch::channel(Some(NodeBinaryOptions::default())).1, + }))) + } -#[async_trait::async_trait] -pub trait NodeRuntime: Send + Sync { - async fn binary_path(&self) -> Result; - async fn node_environment_path(&self) -> Result; + async fn instance(&self) -> Result> { + let mut state = self.0.lock().await; - async fn run_npm_subcommand( + while state.options.borrow().is_none() { + state.options.changed().await?; + } + let options = state.options.borrow().clone().unwrap(); + if state.last_options.as_ref() != Some(&options) { + state.instance.take(); + } + if let Some(instance) = state.instance.as_ref() { + return Ok(instance.boxed_clone()); + } + + if let Some((node, npm)) = options.use_paths.as_ref() { + let instance = SystemNodeRuntime::new(node.clone(), npm.clone()).await?; + state.instance = Some(instance.boxed_clone()); + return Ok(instance); + } + + if options.allow_path_lookup { + if let Some(instance) = SystemNodeRuntime::detect().await { + state.instance = Some(instance.boxed_clone()); + return Ok(instance); + } + } + + let instance = if options.allow_binary_download { + ManagedNodeRuntime::install_if_needed(&state.http).await? + } else { + Box::new(UnavailableNodeRuntime) + }; + + state.instance = Some(instance.boxed_clone()); + return Ok(instance); + } + + pub async fn binary_path(&self) -> Result { + self.instance().await?.binary_path() + } + + pub async fn run_npm_subcommand( &self, - directory: Option<&Path>, + directory: &Path, subcommand: &str, args: &[&str], - ) -> Result; - - async fn npm_package_latest_version(&self, name: &str) -> Result; - - async fn npm_install_packages(&self, directory: &Path, packages: &[(&str, &str)]) - -> Result<()>; + ) -> Result { + let http = self.0.lock().await.http.clone(); + self.instance() + .await? 
+ .run_npm_subcommand(Some(directory), http.proxy(), subcommand, args) + .await + } - async fn npm_package_installed_version( + pub async fn npm_package_installed_version( &self, local_package_directory: &Path, name: &str, - ) -> Result>; + ) -> Result> { + self.instance() + .await? + .npm_package_installed_version(local_package_directory, name) + .await + } - async fn should_install_npm_package( + pub async fn npm_package_latest_version(&self, name: &str) -> Result { + let http = self.0.lock().await.http.clone(); + let output = self + .instance() + .await? + .run_npm_subcommand( + None, + http.proxy(), + "info", + &[ + name, + "--json", + "--fetch-retry-mintimeout", + "2000", + "--fetch-retry-maxtimeout", + "5000", + "--fetch-timeout", + "5000", + ], + ) + .await?; + + let mut info: NpmInfo = serde_json::from_slice(&output.stdout)?; + info.dist_tags + .latest + .or_else(|| info.versions.pop()) + .ok_or_else(|| anyhow!("no version found for npm package {}", name)) + } + + pub async fn npm_install_packages( + &self, + directory: &Path, + packages: &[(&str, &str)], + ) -> Result<()> { + let packages: Vec<_> = packages + .iter() + .map(|(name, version)| format!("{name}@{version}")) + .collect(); + + let mut arguments: Vec<_> = packages.iter().map(|p| p.as_str()).collect(); + arguments.extend_from_slice(&[ + "--save-exact", + "--fetch-retry-mintimeout", + "2000", + "--fetch-retry-maxtimeout", + "5000", + "--fetch-timeout", + "5000", + ]); + + self.run_npm_subcommand(directory, "install", &arguments) + .await?; + Ok(()) + } + + pub async fn should_install_npm_package( &self, package_name: &str, local_executable_path: &Path, @@ -110,21 +216,78 @@ pub trait NodeRuntime: Send + Sync { } } -pub struct RealNodeRuntime { - http: Arc, - installation_lock: Mutex<()>, +enum ArchiveType { + TarGz, + Zip, } -impl RealNodeRuntime { - pub fn new(http: Arc) -> Arc { - Arc::new(RealNodeRuntime { - http, - installation_lock: Mutex::new(()), - }) +#[derive(Debug, Deserialize)] 
+#[serde(rename_all = "kebab-case")] +pub struct NpmInfo { + #[serde(default)] + dist_tags: NpmInfoDistTags, + versions: Vec, +} + +#[derive(Debug, Deserialize, Default)] +pub struct NpmInfoDistTags { + latest: Option, +} + +#[async_trait::async_trait] +trait NodeRuntimeTrait: Send + Sync { + fn boxed_clone(&self) -> Box; + fn binary_path(&self) -> Result; + + async fn run_npm_subcommand( + &self, + directory: Option<&Path>, + proxy: Option<&Uri>, + subcommand: &str, + args: &[&str], + ) -> Result; + + async fn npm_package_installed_version( + &self, + local_package_directory: &Path, + name: &str, + ) -> Result>; +} + +#[derive(Clone)] +struct ManagedNodeRuntime { + installation_path: PathBuf, +} + +impl ManagedNodeRuntime { + const VERSION: &str = "v22.5.1"; + + #[cfg(not(windows))] + const NODE_PATH: &str = "bin/node"; + #[cfg(windows)] + const NODE_PATH: &str = "node.exe"; + + #[cfg(not(windows))] + const NPM_PATH: &str = "bin/npm"; + #[cfg(windows)] + const NPM_PATH: &str = "node_modules/npm/bin/npm-cli.js"; + + async fn node_environment_path(&self) -> Result { + let node_binary = self.installation_path.join(Self::NODE_PATH); + let mut env_path = vec![node_binary + .parent() + .expect("invalid node binary path") + .to_path_buf()]; + + if let Some(existing_path) = std::env::var_os("PATH") { + let mut paths = std::env::split_paths(&existing_path).collect::>(); + env_path.append(&mut paths); + } + + std::env::join_paths(env_path).context("failed to create PATH env variable") } - async fn install_if_needed(&self) -> Result { - let _lock = self.installation_lock.lock().await; + async fn install_if_needed(http: &Arc) -> Result> { log::info!("Node runtime install_if_needed"); let os = match consts::OS { @@ -140,11 +303,12 @@ impl RealNodeRuntime { other => bail!("Running on unsupported architecture: {other}"), }; - let folder_name = format!("node-{VERSION}-{os}-{arch}"); + let version = Self::VERSION; + let folder_name = format!("node-{version}-{os}-{arch}"); let 
node_containing_dir = paths::support_dir().join("node"); let node_dir = node_containing_dir.join(folder_name); - let node_binary = node_dir.join(NODE_PATH); - let npm_file = node_dir.join(NPM_PATH); + let node_binary = node_dir.join(Self::NODE_PATH); + let npm_file = node_dir.join(Self::NPM_PATH); let mut command = Command::new(&node_binary); @@ -177,16 +341,16 @@ impl RealNodeRuntime { other => bail!("Running on unsupported os: {other}"), }; + let version = Self::VERSION; let file_name = format!( - "node-{VERSION}-{os}-{arch}.{extension}", + "node-{version}-{os}-{arch}.{extension}", extension = match archive_type { ArchiveType::TarGz => "tar.gz", ArchiveType::Zip => "zip", } ); - let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}"); - let mut response = self - .http + let url = format!("https://nodejs.org/dist/{version}/{file_name}"); + let mut response = http .get(&url, Default::default(), true) .await .context("error downloading Node binary tarball")?; @@ -207,43 +371,32 @@ impl RealNodeRuntime { _ = fs::write(node_dir.join("blank_user_npmrc"), []).await; _ = fs::write(node_dir.join("blank_global_npmrc"), []).await; - anyhow::Ok(node_dir) + anyhow::Ok(Box::new(ManagedNodeRuntime { + installation_path: node_dir, + })) } } #[async_trait::async_trait] -impl NodeRuntime for RealNodeRuntime { - async fn binary_path(&self) -> Result { - let installation_path = self.install_if_needed().await?; - Ok(installation_path.join(NODE_PATH)) +impl NodeRuntimeTrait for ManagedNodeRuntime { + fn boxed_clone(&self) -> Box { + Box::new(self.clone()) } - async fn node_environment_path(&self) -> Result { - let installation_path = self.install_if_needed().await?; - let node_binary = installation_path.join(NODE_PATH); - let mut env_path = vec![node_binary - .parent() - .expect("invalid node binary path") - .to_path_buf()]; - - if let Some(existing_path) = std::env::var_os("PATH") { - let mut paths = std::env::split_paths(&existing_path).collect::>(); - 
env_path.append(&mut paths); - } - - Ok(std::env::join_paths(env_path).context("failed to create PATH env variable")?) + fn binary_path(&self) -> Result { + Ok(self.installation_path.join(Self::NODE_PATH)) } async fn run_npm_subcommand( &self, directory: Option<&Path>, + proxy: Option<&Uri>, subcommand: &str, args: &[&str], ) -> Result { let attempt = || async move { - let installation_path = self.install_if_needed().await?; - let node_binary = installation_path.join(NODE_PATH); - let npm_file = installation_path.join(NPM_PATH); + let node_binary = self.installation_path.join(Self::NODE_PATH); + let npm_file = self.installation_path.join(Self::NPM_PATH); let env_path = self.node_environment_path().await?; if smol::fs::metadata(&node_binary).await.is_err() { @@ -258,54 +411,17 @@ impl NodeRuntime for RealNodeRuntime { command.env_clear(); command.env("PATH", env_path); command.arg(npm_file).arg(subcommand); - command.args(["--cache".into(), installation_path.join("cache")]); + command.args(["--cache".into(), self.installation_path.join("cache")]); command.args([ "--userconfig".into(), - installation_path.join("blank_user_npmrc"), + self.installation_path.join("blank_user_npmrc"), ]); command.args([ "--globalconfig".into(), - installation_path.join("blank_global_npmrc"), + self.installation_path.join("blank_global_npmrc"), ]); command.args(args); - - if let Some(directory) = directory { - command.current_dir(directory); - command.args(["--prefix".into(), directory.to_path_buf()]); - } - - if let Some(proxy) = self.http.proxy() { - // Map proxy settings from `http://localhost:10809` to `http://127.0.0.1:10809` - // NodeRuntime without environment information can not parse `localhost` - // correctly. 
- // TODO: map to `[::1]` if we are using ipv6 - let proxy = proxy - .to_string() - .to_ascii_lowercase() - .replace("localhost", "127.0.0.1"); - - command.args(["--proxy", &proxy]); - } - - #[cfg(windows)] - { - // SYSTEMROOT is a critical environment variables for Windows. - if let Some(val) = std::env::var("SYSTEMROOT") - .context("Missing environment variable: SYSTEMROOT!") - .log_err() - { - command.env("SYSTEMROOT", val); - } - // Without ComSpec, the post-install will always fail. - if let Some(val) = std::env::var("ComSpec") - .context("Missing environment variable: ComSpec!") - .log_err() - { - command.env("ComSpec", val); - } - command.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); - } - + configure_npm_command(&mut command, directory, proxy); command.output().await.map_err(|e| anyhow!("{e}")) }; @@ -332,182 +448,228 @@ impl NodeRuntime for RealNodeRuntime { output.map_err(|e| anyhow!("{e}")) } - - async fn npm_package_latest_version(&self, name: &str) -> Result { - let output = self - .run_npm_subcommand( - None, - "info", - &[ - name, - "--json", - "--fetch-retry-mintimeout", - "2000", - "--fetch-retry-maxtimeout", - "5000", - "--fetch-timeout", - "5000", - ], - ) - .await?; - - let mut info: NpmInfo = serde_json::from_slice(&output.stdout)?; - info.dist_tags - .latest - .or_else(|| info.versions.pop()) - .ok_or_else(|| anyhow!("no version found for npm package {}", name)) - } - async fn npm_package_installed_version( &self, local_package_directory: &Path, name: &str, ) -> Result> { - let mut package_json_path = local_package_directory.to_owned(); - package_json_path.extend(["node_modules", name, "package.json"]); - - let mut file = match fs::File::open(package_json_path).await { - Ok(file) => file, - Err(err) => { - if err.kind() == io::ErrorKind::NotFound { - return Ok(None); - } + read_package_installed_version(local_package_directory.join("node_modules"), name).await + } +} - Err(err)? 
- } - }; +#[derive(Clone)] +pub struct SystemNodeRuntime { + node: PathBuf, + npm: PathBuf, + global_node_modules: PathBuf, + scratch_dir: PathBuf, +} - #[derive(Deserialize)] - struct PackageJson { - version: String, +impl SystemNodeRuntime { + const MIN_VERSION: semver::Version = Version::new(18, 0, 0); + async fn new(node: PathBuf, npm: PathBuf) -> Result> { + let output = Command::new(&node) + .arg("--version") + .output() + .await + .with_context(|| format!("running node from {:?}", node))?; + if !output.status.success() { + anyhow::bail!( + "failed to run node --version. stdout: {}, stderr: {}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + ); + } + let version_str = String::from_utf8_lossy(&output.stdout); + let version = semver::Version::parse(version_str.trim().trim_start_matches('v'))?; + if version < Self::MIN_VERSION { + anyhow::bail!( + "node at {} is too old. want: {}, got: {}", + node.to_string_lossy(), + Self::MIN_VERSION, + version + ) } - let mut contents = String::new(); - file.read_to_string(&mut contents).await?; - let package_json: PackageJson = serde_json::from_str(&contents)?; - Ok(Some(package_json.version)) - } - - async fn npm_install_packages( - &self, - directory: &Path, - packages: &[(&str, &str)], - ) -> Result<()> { - let packages: Vec<_> = packages - .iter() - .map(|(name, version)| format!("{name}@{version}")) - .collect(); + let scratch_dir = paths::support_dir().join("node"); + fs::create_dir(&scratch_dir).await.ok(); + fs::create_dir(scratch_dir.join("cache")).await.ok(); + fs::write(scratch_dir.join("blank_user_npmrc"), []) + .await + .ok(); + fs::write(scratch_dir.join("blank_global_npmrc"), []) + .await + .ok(); - let mut arguments: Vec<_> = packages.iter().map(|p| p.as_str()).collect(); - arguments.extend_from_slice(&[ - "--save-exact", - "--fetch-retry-mintimeout", - "2000", - "--fetch-retry-maxtimeout", - "5000", - "--fetch-timeout", - "5000", - ]); + let mut this = Self { + node, 
+ npm, + global_node_modules: PathBuf::default(), + scratch_dir, + }; + let output = this.run_npm_subcommand(None, None, "root", &["-g"]).await?; + this.global_node_modules = + PathBuf::from(String::from_utf8_lossy(&output.stdout).to_string()); - self.run_npm_subcommand(Some(directory), "install", &arguments) - .await?; - Ok(()) + Ok(Box::new(this)) } -} - -pub struct FakeNodeRuntime; -impl FakeNodeRuntime { - pub fn new() -> Arc { - Arc::new(Self) + async fn detect() -> Option> { + let node = which::which("node").ok()?; + let npm = which::which("npm").ok()?; + Self::new(node, npm).await.log_err() } } #[async_trait::async_trait] -impl NodeRuntime for FakeNodeRuntime { - async fn binary_path(&self) -> anyhow::Result { - unreachable!() +impl NodeRuntimeTrait for SystemNodeRuntime { + fn boxed_clone(&self) -> Box { + Box::new(self.clone()) } - async fn node_environment_path(&self) -> anyhow::Result { - unreachable!() + fn binary_path(&self) -> Result { + Ok(self.node.clone()) } async fn run_npm_subcommand( &self, - _: Option<&Path>, + directory: Option<&Path>, + proxy: Option<&Uri>, subcommand: &str, args: &[&str], ) -> anyhow::Result { - unreachable!("Should not run npm subcommand '{subcommand}' with args {args:?}") - } + let mut command = Command::new(self.node.clone()); + command + .env_clear() + .env("PATH", std::env::var_os("PATH").unwrap_or_default()) + .arg(self.npm.clone()) + .arg(subcommand) + .args(["--cache".into(), self.scratch_dir.join("cache")]) + .args([ + "--userconfig".into(), + self.scratch_dir.join("blank_user_npmrc"), + ]) + .args([ + "--globalconfig".into(), + self.scratch_dir.join("blank_global_npmrc"), + ]) + .args(args); + configure_npm_command(&mut command, directory, proxy); + let output = command.output().await?; + if !output.status.success() { + return Err(anyhow!( + "failed to execute npm {subcommand} subcommand:\nstdout: {:?}\nstderr: {:?}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + )); + } - 
async fn npm_package_latest_version(&self, name: &str) -> anyhow::Result { - unreachable!("Should not query npm package '{name}' for latest version") + Ok(output) } async fn npm_package_installed_version( &self, - _local_package_directory: &Path, + local_package_directory: &Path, name: &str, ) -> Result> { - unreachable!("Should not query npm package '{name}' for installed version") - } - - async fn npm_install_packages( - &self, - _: &Path, - packages: &[(&str, &str)], - ) -> anyhow::Result<()> { - unreachable!("Should not install packages {packages:?}") + read_package_installed_version(local_package_directory.join("node_modules"), name).await + // todo: allow returning a globally installed version (requires callers not to hard-code the path) } } -// TODO: Remove this when headless binary can run node -pub struct DummyNodeRuntime; +async fn read_package_installed_version( + node_module_directory: PathBuf, + name: &str, +) -> Result> { + let package_json_path = node_module_directory.join(name).join("package.json"); + + let mut file = match fs::File::open(package_json_path).await { + Ok(file) => file, + Err(err) => { + if err.kind() == io::ErrorKind::NotFound { + return Ok(None); + } + + Err(err)? 
+ } + }; -impl DummyNodeRuntime { - pub fn new() -> Arc { - Arc::new(Self) + #[derive(Deserialize)] + struct PackageJson { + version: String, } + + let mut contents = String::new(); + file.read_to_string(&mut contents).await?; + let package_json: PackageJson = serde_json::from_str(&contents)?; + Ok(Some(package_json.version)) } +pub struct UnavailableNodeRuntime; + #[async_trait::async_trait] -impl NodeRuntime for DummyNodeRuntime { - async fn binary_path(&self) -> anyhow::Result { - anyhow::bail!("Dummy Node Runtime") +impl NodeRuntimeTrait for UnavailableNodeRuntime { + fn boxed_clone(&self) -> Box { + Box::new(UnavailableNodeRuntime) } - - async fn node_environment_path(&self) -> anyhow::Result { - anyhow::bail!("Dummy node runtime") + fn binary_path(&self) -> Result { + bail!("binary_path: no node runtime available") } async fn run_npm_subcommand( &self, _: Option<&Path>, - _subcommand: &str, - _args: &[&str], + _: Option<&Uri>, + _: &str, + _: &[&str], ) -> anyhow::Result { - anyhow::bail!("Dummy node runtime") - } - - async fn npm_package_latest_version(&self, _name: &str) -> anyhow::Result { - anyhow::bail!("Dummy node runtime") + bail!("run_npm_subcommand: no node runtime available") } async fn npm_package_installed_version( &self, _local_package_directory: &Path, - _name: &str, + _: &str, ) -> Result> { - anyhow::bail!("Dummy node runtime") + bail!("npm_package_installed_version: no node runtime available") } +} - async fn npm_install_packages( - &self, - _: &Path, - _packages: &[(&str, &str)], - ) -> anyhow::Result<()> { - anyhow::bail!("Dummy node runtime") +fn configure_npm_command(command: &mut Command, directory: Option<&Path>, proxy: Option<&Uri>) { + if let Some(directory) = directory { + command.current_dir(directory); + command.args(["--prefix".into(), directory.to_path_buf()]); + } + + if let Some(proxy) = proxy { + // Map proxy settings from `http://localhost:10809` to `http://127.0.0.1:10809` + // NodeRuntime without environment information can 
not parse `localhost` + // correctly. + // TODO: map to `[::1]` if we are using ipv6 + let proxy = proxy + .to_string() + .to_ascii_lowercase() + .replace("localhost", "127.0.0.1"); + + command.args(["--proxy", &proxy]); + } + + #[cfg(windows)] + { + // SYSTEMROOT is a critical environment variables for Windows. + if let Some(val) = std::env::var("SYSTEMROOT") + .context("Missing environment variable: SYSTEMROOT!") + .log_err() + { + command.env("SYSTEMROOT", val); + } + // Without ComSpec, the post-install will always fail. + if let Some(val) = std::env::var("ComSpec") + .context("Missing environment variable: ComSpec!") + .log_err() + { + command.env("ComSpec", val); + } + command.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0); } } diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 59ed915453996c..012beb3fd7ab28 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -138,7 +138,7 @@ impl Prettier { pub async fn start( _: LanguageServerId, prettier_dir: PathBuf, - _: Arc, + _: NodeRuntime, _: AsyncAppContext, ) -> anyhow::Result { Ok(Self::Test(TestPrettier { @@ -151,7 +151,7 @@ impl Prettier { pub async fn start( server_id: LanguageServerId, prettier_dir: PathBuf, - node: Arc, + node: NodeRuntime, cx: AsyncAppContext, ) -> anyhow::Result { use lsp::LanguageServerBinary; diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 6673f9da1ddd71..6c71d4baebf563 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -17,7 +17,7 @@ use async_trait::async_trait; use client::{proto, TypedEnvelope}; use collections::{btree_map, BTreeMap, HashMap, HashSet}; use futures::{ - future::{join_all, BoxFuture, Shared}, + future::{join_all, Shared}, select, stream::FuturesUnordered, AsyncWriteExt, Future, FutureExt, StreamExt, @@ -27,7 +27,7 @@ use gpui::{ AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, 
PromptLevel, Task, WeakModel, }; -use http_client::{AsyncBody, HttpClient, Request, Response, Uri}; +use http_client::{BlockedHttpClient, HttpClient}; use language::{ language_settings::{ all_language_settings, language_settings, AllLanguageSettings, FormatOnSave, Formatter, @@ -7979,35 +7979,6 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { } } -struct BlockedHttpClient; - -impl HttpClient for BlockedHttpClient { - fn send( - &self, - _req: Request, - ) -> BoxFuture<'static, Result, anyhow::Error>> { - Box::pin(async { - Err(std::io::Error::new( - std::io::ErrorKind::PermissionDenied, - "ssh host blocked http connection", - ) - .into()) - }) - } - - fn proxy(&self) -> Option<&Uri> { - None - } - - fn send_with_redirect_policy( - &self, - req: Request, - _: bool, - ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.send(req) - } -} - struct SshLspAdapterDelegate { lsp_store: WeakModel, worktree: worktree::Snapshot, diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 82bd8464b2e535..65e2aa2e7673e8 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -30,7 +30,7 @@ use crate::{ }; pub struct PrettierStore { - node: Arc, + node: NodeRuntime, fs: Arc, languages: Arc, worktree_store: Model, @@ -52,7 +52,7 @@ impl EventEmitter for PrettierStore {} impl PrettierStore { pub fn new( - node: Arc, + node: NodeRuntime, fs: Arc, languages: Arc, worktree_store: Model, @@ -212,7 +212,7 @@ impl PrettierStore { } fn start_prettier( - node: Arc, + node: NodeRuntime, prettier_dir: PathBuf, worktree_id: Option, cx: &mut ModelContext, @@ -241,7 +241,7 @@ impl PrettierStore { } fn start_default_prettier( - node: Arc, + node: NodeRuntime, worktree_id: Option, cx: &mut ModelContext, ) -> Task> { @@ -749,7 +749,7 @@ impl DefaultPrettier { pub fn prettier_task( &mut self, - node: &Arc, + node: &NodeRuntime, worktree_id: Option, cx: &mut ModelContext, ) -> Option>> { @@ -767,7 +767,7 @@ impl 
DefaultPrettier { impl PrettierInstance { pub fn prettier_task( &mut self, - node: &Arc, + node: &NodeRuntime, prettier_dir: Option<&Path>, worktree_id: Option, cx: &mut ModelContext, @@ -786,7 +786,7 @@ impl PrettierInstance { None => match prettier_dir { Some(prettier_dir) => { let new_task = PrettierStore::start_prettier( - Arc::clone(node), + node.clone(), prettier_dir.to_path_buf(), worktree_id, cx, @@ -797,7 +797,7 @@ impl PrettierInstance { } None => { self.attempt += 1; - let node = Arc::clone(node); + let node = node.clone(); cx.spawn(|prettier_store, mut cx| async move { prettier_store .update(&mut cx, |_, cx| { @@ -818,7 +818,7 @@ impl PrettierInstance { async fn install_prettier_packages( fs: &dyn Fs, plugins_to_install: HashSet>, - node: Arc, + node: NodeRuntime, ) -> anyhow::Result<()> { let packages_to_versions = future::try_join_all( plugins_to_install diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index dc9337674b7eb7..0015af380292e1 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -153,7 +153,7 @@ pub struct Project { git_diff_debouncer: DebouncedDelay, remotely_created_models: Arc>, terminals: Terminals, - node: Option>, + node: Option, tasks: Model, hosted_project_id: Option, dev_server_project_id: Option, @@ -579,7 +579,7 @@ impl Project { pub fn local( client: Arc, - node: Arc, + node: NodeRuntime, user_store: Model, languages: Arc, fs: Arc, @@ -675,7 +675,7 @@ impl Project { pub fn ssh( ssh: Arc, client: Arc, - node: Arc, + node: NodeRuntime, user_store: Model, languages: Arc, fs: Arc, @@ -1064,7 +1064,7 @@ impl Project { .update(|cx| { Project::local( client, - node_runtime::FakeNodeRuntime::new(), + node_runtime::NodeRuntime::unavailable(), user_store, Arc::new(languages), fs, @@ -1104,7 +1104,7 @@ impl Project { let project = cx.update(|cx| { Project::local( client, - node_runtime::FakeNodeRuntime::new(), + node_runtime::NodeRuntime::unavailable(), user_store, 
Arc::new(languages), fs, @@ -1157,7 +1157,7 @@ impl Project { self.user_store.clone() } - pub fn node_runtime(&self) -> Option<&Arc> { + pub fn node_runtime(&self) -> Option<&NodeRuntime> { self.node.as_ref() } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 904efe0a6b01f0..d6f5600a551ef4 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -34,6 +34,10 @@ pub struct ProjectSettings { #[serde(default)] pub git: GitSettings, + /// Configuration for Node-related features + #[serde(default)] + pub node: NodeBinarySettings, + /// Configuration for how direnv configuration should be loaded #[serde(default)] pub load_direnv: DirenvSettings, @@ -43,6 +47,17 @@ pub struct ProjectSettings { pub session: SessionSettings, } +#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema)] +pub struct NodeBinarySettings { + /// The path to the node binary + pub path: Option, + /// The path to the npm binary Zed should use (defaults to .path/../npm) + pub npm_path: Option, + /// If disabled, zed will download its own copy of node. 
+ #[serde(default)] + pub disable_path_lookup: Option, +} + #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum DirenvSettings { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 0d644a64a6aa7a..87c9583077c4d9 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -2,7 +2,7 @@ use anyhow::{anyhow, Result}; use fs::Fs; use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext}; use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry}; -use node_runtime::DummyNodeRuntime; +use node_runtime::NodeRuntime; use project::{ buffer_store::{BufferStore, BufferStoreEvent}, project_settings::SettingsObserver, @@ -57,7 +57,7 @@ impl HeadlessProject { }); let prettier_store = cx.new_model(|cx| { PrettierStore::new( - DummyNodeRuntime::new(), + NodeRuntime::unavailable(), fs.clone(), languages.clone(), worktree_store.clone(), diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index b5ab1c40070a09..ba59d310c81b88 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -9,7 +9,7 @@ use language::{ Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LanguageServerName, }; use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind}; -use node_runtime::FakeNodeRuntime; +use node_runtime::NodeRuntime; use project::{ search::{SearchQuery, SearchResult}, Project, @@ -502,7 +502,7 @@ fn build_project(ssh: Arc, cx: &mut TestAppContext) -> Model, pub fs: Arc, pub build_window_options: fn(Option, &mut AppContext) -> WindowOptions, - pub node_runtime: Arc, + pub node_runtime: NodeRuntime, pub session: Model, } @@ -590,7 +590,7 @@ impl AppState { #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut AppContext) 
-> Arc { - use node_runtime::FakeNodeRuntime; + use node_runtime::NodeRuntime; use session::Session; use settings::SettingsStore; use ui::Context as _; @@ -619,7 +619,7 @@ impl AppState { languages, user_store, workspace_store, - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), build_window_options: |_, _| Default::default(), session, }) @@ -4418,7 +4418,7 @@ impl Workspace { #[cfg(any(test, feature = "test-support"))] pub fn test_new(project: Model, cx: &mut ViewContext) -> Self { - use node_runtime::FakeNodeRuntime; + use node_runtime::NodeRuntime; use session::Session; let client = project.read(cx).client(); @@ -4434,7 +4434,7 @@ impl Workspace { user_store, fs: project.read(cx).fs().clone(), build_window_options: |_, _| Default::default(), - node_runtime: FakeNodeRuntime::new(), + node_runtime: NodeRuntime::unavailable(), session, }); let workspace = Self::new(Default::default(), project, app_state, cx); diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index ad2e7cd48c67d8..65724480f62334 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -19,6 +19,7 @@ activity_indicator.workspace = true anyhow.workspace = true assets.workspace = true assistant.workspace = true +async-watch.workspace = true audio.workspace = true auto_update.workspace = true backtrace = "0.3" @@ -92,6 +93,7 @@ serde_json.workspace = true session.workspace = true settings.workspace = true settings_ui.workspace = true +shellexpand.workspace = true simplelog.workspace = true smol.workspace = true snippet_provider.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index d3eb97c9aa506e..309931f6163528 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -29,8 +29,9 @@ use language::LanguageRegistry; use log::LevelFilter; use assets::Assets; -use node_runtime::RealNodeRuntime; +use node_runtime::{NodeBinaryOptions, NodeRuntime}; use parking_lot::Mutex; +use project::project_settings::ProjectSettings; 
use recent_projects::open_ssh_project; use release_channel::{AppCommitSha, AppVersion}; use session::{AppSession, Session}; @@ -43,7 +44,7 @@ use std::{ env, fs::OpenOptions, io::{IsTerminal, Write}, - path::Path, + path::{Path, PathBuf}, process, sync::Arc, }; @@ -477,7 +478,32 @@ fn main() { let mut languages = LanguageRegistry::new(cx.background_executor().clone()); languages.set_language_server_download_dir(paths::languages_dir().clone()); let languages = Arc::new(languages); - let node_runtime = RealNodeRuntime::new(client.http_client()); + let (tx, rx) = async_watch::channel(None); + cx.observe_global::(move |cx| { + let settings = &ProjectSettings::get_global(cx).node; + let options = NodeBinaryOptions { + allow_path_lookup: !settings.disable_path_lookup.unwrap_or_default(), + // TODO: Expose this setting + allow_binary_download: true, + use_paths: settings.path.as_ref().map(|node_path| { + let node_path = PathBuf::from(shellexpand::tilde(node_path).as_ref()); + let npm_path = settings + .npm_path + .as_ref() + .map(|path| PathBuf::from(shellexpand::tilde(&path).as_ref())); + ( + node_path.clone(), + npm_path.unwrap_or_else(|| { + let base_path = PathBuf::new(); + node_path.parent().unwrap_or(&base_path).join("npm") + }), + ) + }), + }; + tx.send(Some(options)).log_err(); + }) + .detach(); + let node_runtime = NodeRuntime::new(client.http_client(), rx); language::init(cx); languages::init(languages.clone(), node_runtime.clone(), cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index b0e023f42e0a01..8f4f1af24331c0 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -3365,7 +3365,7 @@ mod tests { cx.set_global(settings); let languages = LanguageRegistry::test(cx.executor()); let languages = Arc::new(languages); - let node_runtime = node_runtime::FakeNodeRuntime::new(); + let node_runtime = node_runtime::NodeRuntime::unavailable(); cx.update(|cx| { languages::init(languages.clone(), node_runtime, cx); }); From 
d989183f94725f4b2f42c6e7db79e37e0fdbddd5 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 23 Sep 2024 16:21:24 -0600 Subject: [PATCH 283/762] Remove `Debug` constraint on `SumTree` (and its related traits/structs) (#18248) Release Notes: - N/A Co-authored-by: Nathan --- crates/editor/src/selections_collection.rs | 4 +- crates/sum_tree/src/cursor.rs | 6 +-- crates/sum_tree/src/sum_tree.rs | 53 ++++++++++++++++++++-- crates/sum_tree/src/tree_map.rs | 34 +++++++------- 4 files changed, 69 insertions(+), 28 deletions(-) diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index 35df9c1b53f72d..c85e60fdaa92e5 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -109,7 +109,7 @@ impl SelectionsCollection { pub fn all<'a, D>(&self, cx: &AppContext) -> Vec> where - D: 'a + TextDimension + Ord + Sub + std::fmt::Debug, + D: 'a + TextDimension + Ord + Sub, { let disjoint_anchors = &self.disjoint; let mut disjoint = @@ -850,7 +850,7 @@ pub(crate) fn resolve_multiple<'a, D, I>( snapshot: &MultiBufferSnapshot, ) -> impl 'a + Iterator> where - D: TextDimension + Ord + Sub + std::fmt::Debug, + D: TextDimension + Ord + Sub, I: 'a + IntoIterator>, { let (to_summarize, selections) = selections.into_iter().tee(); diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 6da43a8de5ce39..773e7db88bad3d 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -431,11 +431,9 @@ where aggregate: &mut dyn SeekAggregate<'a, T>, cx: &::Context, ) -> bool { - debug_assert!( + assert!( target.cmp(&self.position, cx) >= Ordering::Equal, - "cannot seek backward from {:?} to {:?}", - self.position, - target + "cannot seek backward", ); if !self.did_seek { diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index ca351d67cea76f..965413d3190aa2 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ 
b/crates/sum_tree/src/sum_tree.rs @@ -34,7 +34,7 @@ pub trait KeyedItem: Item { /// /// Each Summary type can have multiple [`Dimensions`] that it measures, /// which can be used to navigate the tree -pub trait Summary: Clone + fmt::Debug { +pub trait Summary: Clone { type Context; fn zero(cx: &Self::Context) -> Self; @@ -49,7 +49,7 @@ pub trait Summary: Clone + fmt::Debug { /// # Example: /// Zed's rope has a `TextSummary` type that summarizes lines, characters, and bytes. /// Each of these are different dimensions we may want to seek to -pub trait Dimension<'a, S: Summary>: Clone + fmt::Debug { +pub trait Dimension<'a, S: Summary>: Clone { fn zero(cx: &S::Context) -> Self; fn add_summary(&mut self, summary: &'a S, cx: &S::Context); @@ -71,7 +71,7 @@ impl<'a, T: Summary> Dimension<'a, T> for T { } } -pub trait SeekTarget<'a, S: Summary, D: Dimension<'a, S>>: fmt::Debug { +pub trait SeekTarget<'a, S: Summary, D: Dimension<'a, S>> { fn cmp(&self, cursor_location: &D, cx: &S::Context) -> Ordering; } @@ -173,9 +173,19 @@ impl Bias { /// The maximum number of items per node is `TREE_BASE * 2`. /// /// Any [`Dimension`] supported by the [`Summary`] type can be used to seek to a specific location in the tree. 
-#[derive(Debug, Clone)] +#[derive(Clone)] pub struct SumTree(Arc>); +impl fmt::Debug for SumTree +where + T: fmt::Debug + Item, + T::Summary: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_tuple("SumTree").field(&self.0).finish() + } +} + impl SumTree { pub fn new(cx: &::Context) -> Self { SumTree(Arc::new(Node::Leaf { @@ -763,7 +773,7 @@ where } } -#[derive(Clone, Debug)] +#[derive(Clone)] pub enum Node { Internal { height: u8, @@ -778,6 +788,39 @@ pub enum Node { }, } +impl fmt::Debug for Node +where + T: Item + fmt::Debug, + T::Summary: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Node::Internal { + height, + summary, + child_summaries, + child_trees, + } => f + .debug_struct("Internal") + .field("height", height) + .field("summary", summary) + .field("child_summaries", child_summaries) + .field("child_trees", child_trees) + .finish(), + Node::Leaf { + summary, + items, + item_summaries, + } => f + .debug_struct("Leaf") + .field("summary", summary) + .field("items", items) + .field("item_summaries", item_summaries) + .finish(), + } + } +} + impl Node { fn is_leaf(&self) -> bool { matches!(self, Node::Leaf { .. 
}) diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 72465b1a99cabd..b7eadb566d3ed7 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -5,8 +5,8 @@ use crate::{Bias, Dimension, Edit, Item, KeyedItem, SeekTarget, SumTree, Summary #[derive(Clone, PartialEq, Eq)] pub struct TreeMap(SumTree>) where - K: Clone + Debug + Ord, - V: Clone + Debug; + K: Clone + Ord, + V: Clone; #[derive(Clone, Debug, PartialEq, Eq)] pub struct MapEntry { @@ -35,9 +35,9 @@ impl<'a, K> Default for MapKeyRef<'a, K> { #[derive(Clone)] pub struct TreeSet(TreeMap) where - K: Clone + Debug + Ord; + K: Clone + Ord; -impl TreeMap { +impl TreeMap { pub fn from_ordered_entries(entries: impl IntoIterator) -> Self { let tree = SumTree::from_iter( entries @@ -172,7 +172,7 @@ impl TreeMap { } } -impl Debug for TreeMap +impl Debug for TreeMap where K: Clone + Debug + Ord, V: Clone + Debug, @@ -185,7 +185,7 @@ where #[derive(Debug)] struct MapSeekTargetAdaptor<'a, T>(&'a T); -impl<'a, K: Debug + Clone + Ord, T: MapSeekTarget> SeekTarget<'a, MapKey, MapKeyRef<'a, K>> +impl<'a, K: Clone + Ord, T: MapSeekTarget> SeekTarget<'a, MapKey, MapKeyRef<'a, K>> for MapSeekTargetAdaptor<'_, T> { fn cmp(&self, cursor_location: &MapKeyRef, _: &()) -> Ordering { @@ -197,11 +197,11 @@ impl<'a, K: Debug + Clone + Ord, T: MapSeekTarget> SeekTarget<'a, MapKey, } } -pub trait MapSeekTarget: Debug { +pub trait MapSeekTarget { fn cmp_cursor(&self, cursor_location: &K) -> Ordering; } -impl MapSeekTarget for K { +impl MapSeekTarget for K { fn cmp_cursor(&self, cursor_location: &K) -> Ordering { self.cmp(cursor_location) } @@ -209,8 +209,8 @@ impl MapSeekTarget for K { impl Default for TreeMap where - K: Clone + Debug + Ord, - V: Clone + Debug, + K: Clone + Ord, + V: Clone, { fn default() -> Self { Self(Default::default()) @@ -219,7 +219,7 @@ where impl Item for MapEntry where - K: Clone + Debug + Ord, + K: Clone + Ord, V: Clone, { type Summary = MapKey; @@ -231,7 
+231,7 @@ where impl KeyedItem for MapEntry where - K: Clone + Debug + Ord, + K: Clone + Ord, V: Clone, { type Key = MapKey; @@ -243,7 +243,7 @@ where impl Summary for MapKey where - K: Clone + Debug, + K: Clone, { type Context = (); @@ -258,7 +258,7 @@ where impl<'a, K> Dimension<'a, MapKey> for MapKeyRef<'a, K> where - K: Clone + Debug + Ord, + K: Clone + Ord, { fn zero(_cx: &()) -> Self { Default::default() @@ -271,7 +271,7 @@ where impl<'a, K> SeekTarget<'a, MapKey, MapKeyRef<'a, K>> for MapKeyRef<'_, K> where - K: Clone + Debug + Ord, + K: Clone + Ord, { fn cmp(&self, cursor_location: &MapKeyRef, _: &()) -> Ordering { Ord::cmp(&self.0, &cursor_location.0) @@ -280,7 +280,7 @@ where impl Default for TreeSet where - K: Clone + Debug + Ord, + K: Clone + Ord, { fn default() -> Self { Self(Default::default()) @@ -289,7 +289,7 @@ where impl TreeSet where - K: Clone + Debug + Ord, + K: Clone + Ord, { pub fn from_ordered_entries(entries: impl IntoIterator) -> Self { Self(TreeMap::from_ordered_entries( From 20c06545b6c9d51e6329f3194bc89123b7f7f9f4 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 23 Sep 2024 15:47:25 -0700 Subject: [PATCH 284/762] SSH Remoting: Add the ability to resolve file paths on the remote host (#18250) Release Notes: - N/A --- crates/project/src/project.rs | 67 +++++++++++-------- crates/proto/proto/zed.proto | 15 ++++- crates/proto/src/proto.rs | 8 ++- crates/remote_server/src/headless_project.rs | 17 +++++ .../remote_server/src/remote_editing_tests.rs | 45 ++++++++++++- 5 files changed, 119 insertions(+), 33 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 0015af380292e1..199b5a8f5c5755 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3037,15 +3037,11 @@ impl Project { buffer: &Model, cx: &mut ModelContext, ) -> Task> { - // TODO: ssh based remoting. 
- if self.ssh_session.is_some() { - return Task::ready(None); - } - - if self.is_local_or_ssh() { - let expanded = PathBuf::from(shellexpand::tilde(&path).into_owned()); + let path_buf = PathBuf::from(path); + if path_buf.is_absolute() || path.starts_with("~") { + if self.is_local() { + let expanded = PathBuf::from(shellexpand::tilde(&path).into_owned()); - if expanded.is_absolute() { let fs = self.fs.clone(); cx.background_executor().spawn(async move { let path = expanded.as_path(); @@ -3053,16 +3049,24 @@ impl Project { exists.then(|| ResolvedPath::AbsPath(expanded)) }) + } else if let Some(ssh_session) = self.ssh_session.as_ref() { + let request = ssh_session.request(proto::CheckFileExists { + project_id: SSH_PROJECT_ID, + path: path.to_string(), + }); + cx.background_executor().spawn(async move { + let response = request.await.log_err()?; + if response.exists { + Some(ResolvedPath::AbsPath(PathBuf::from(response.path))) + } else { + None + } + }) } else { - self.resolve_path_in_worktrees(expanded, buffer, cx) - } - } else { - let path = PathBuf::from(path); - if path.is_absolute() || path.starts_with("~") { return Task::ready(None); } - - self.resolve_path_in_worktrees(path, buffer, cx) + } else { + self.resolve_path_in_worktrees(path_buf, buffer, cx) } } @@ -4016,17 +4020,7 @@ impl Project { } pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec { - self.worktrees(cx) - .map(|worktree| { - let worktree = worktree.read(cx); - proto::WorktreeMetadata { - id: worktree.id().to_proto(), - root_name: worktree.root_name().into(), - visible: worktree.is_visible(), - abs_path: worktree.abs_path().to_string_lossy().into(), - } - }) - .collect() + self.worktree_store.read(cx).worktree_metadata_protos(cx) } fn set_worktrees_from_proto( @@ -4035,10 +4029,9 @@ impl Project { cx: &mut ModelContext, ) -> Result<()> { cx.notify(); - let result = self.worktree_store.update(cx, |worktree_store, cx| { + self.worktree_store.update(cx, |worktree_store, cx| { 
worktree_store.set_worktrees_from_proto(worktrees, self.replica_id(), cx) - }); - result + }) } fn set_collaborators_from_proto( @@ -4547,6 +4540,22 @@ pub enum ResolvedPath { AbsPath(PathBuf), } +impl ResolvedPath { + pub fn abs_path(&self) -> Option<&Path> { + match self { + Self::AbsPath(path) => Some(path.as_path()), + _ => None, + } + } + + pub fn project_path(&self) -> Option<&ProjectPath> { + match self { + Self::ProjectPath(path) => Some(&path), + _ => None, + } + } +} + impl Item for Buffer { fn try_open( project: &Model, diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index a18bbe8ecf5141..475ed139edfb87 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -293,7 +293,10 @@ message Envelope { TryExec try_exec = 252; ReadTextFile read_text_file = 253; - ReadTextFileResponse read_text_file_response = 254; // current max + ReadTextFileResponse read_text_file_response = 254; + + CheckFileExists check_file_exists = 255; + CheckFileExistsResponse check_file_exists_response = 256; // current max } reserved 158 to 161; @@ -2574,3 +2577,13 @@ message TryExec { message TryExecResponse { string text = 1; } + +message CheckFileExists { + uint64 project_id = 1; + string path = 2; +} + +message CheckFileExistsResponse { + bool exists = 1; + string path = 2; +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index b5a00d16704c41..4146a47409ad71 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -372,7 +372,9 @@ messages!( (ShellEnvResponse, Foreground), (TryExec, Foreground), (ReadTextFile, Foreground), - (ReadTextFileResponse, Foreground) + (ReadTextFileResponse, Foreground), + (CheckFileExists, Background), + (CheckFileExistsResponse, Background) ); request_messages!( @@ -501,6 +503,7 @@ request_messages!( (ShellEnv, ShellEnvResponse), (ReadTextFile, ReadTextFileResponse), (TryExec, Ack), + (CheckFileExists, CheckFileExistsResponse) ); entity_messages!( @@ -578,7 +581,8 @@ 
entity_messages!( WhichCommand, ShellEnv, TryExec, - ReadTextFile + ReadTextFile, + CheckFileExists, ); entity_messages!( diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 87c9583077c4d9..043f7e95ee026c 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -108,6 +108,7 @@ impl HeadlessProject { session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer); client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory); + client.add_request_handler(cx.weak_model(), Self::handle_check_file_exists); client.add_model_request_handler(Self::handle_add_worktree); client.add_model_request_handler(Self::handle_open_buffer_by_path); @@ -298,4 +299,20 @@ impl HeadlessProject { } Ok(proto::ListRemoteDirectoryResponse { entries }) } + + pub async fn handle_check_file_exists( + this: Model, + envelope: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let fs = cx.read_model(&this, |this, _| this.fs.clone())?; + let expanded = shellexpand::tilde(&envelope.payload.path).to_string(); + + let exists = fs.is_file(&PathBuf::from(expanded.clone())).await; + + Ok(proto::CheckFileExistsResponse { + exists, + path: expanded, + }) + } } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index ba59d310c81b88..18eb12b445b97b 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -12,7 +12,7 @@ use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind}; use node_runtime::NodeRuntime; use project::{ search::{SearchQuery, SearchResult}, - Project, + Project, ProjectPath, }; use remote::SshSession; use serde_json::json; @@ -440,6 +440,49 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext }) } +#[gpui::test] +async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: 
&mut TestAppContext) { + let (project, _headless, _fs) = init_test(cx, server_cx).await; + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + + let worktree_id = cx.update(|cx| worktree.read(cx).id()); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + }) + .await + .unwrap(); + + let path = project + .update(cx, |project, cx| { + project.resolve_existing_file_path("/code/project1/README.md", &buffer, cx) + }) + .await + .unwrap(); + assert_eq!( + path.abs_path().unwrap().to_string_lossy(), + "/code/project1/README.md" + ); + + let path = project + .update(cx, |project, cx| { + project.resolve_existing_file_path("../README.md", &buffer, cx) + }) + .await + .unwrap(); + + assert_eq!( + path.project_path().unwrap().clone(), + ProjectPath::from((worktree_id, "README.md")) + ); +} + fn init_logger() { if std::env::var("RUST_LOG").is_ok() { env_logger::try_init().ok(); From 6b56530a4ab0b45d072ce5fe2c19e10e8cd3f58b Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 23 Sep 2024 23:53:28 +0000 Subject: [PATCH 285/762] lua: Bump to v0.1.0 (#18246) This PR bumps the Lua extension to v0.1.0 - https://github.com/zed-industries/zed/pull/18199 - https://github.com/zed-industries/zed/pull/16955 --- Cargo.lock | 2 +- extensions/lua/Cargo.toml | 2 +- extensions/lua/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 894dd00f6d7f79..09a68973340a57 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14621,7 +14621,7 @@ dependencies = [ [[package]] name = "zed_lua" -version = "0.0.3" +version = "0.1.0" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/lua/Cargo.toml b/extensions/lua/Cargo.toml index ace7f4700caebf..f577ce18712c4b 100644 --- a/extensions/lua/Cargo.toml +++ b/extensions/lua/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_lua" 
-version = "0.0.3" +version = "0.1.0" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/lua/extension.toml b/extensions/lua/extension.toml index cd00bbc7c1ad7a..82026f48ba90dc 100644 --- a/extensions/lua/extension.toml +++ b/extensions/lua/extension.toml @@ -1,7 +1,7 @@ id = "lua" name = "Lua" description = "Lua support." -version = "0.0.3" +version = "0.1.0" schema_version = 1 authors = ["Max Brunsfeld "] repository = "https://github.com/zed-industries/zed" From dbc325ea12b7c06183149e472008f67a2ce0ce5f Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 24 Sep 2024 09:52:20 +0200 Subject: [PATCH 286/762] vtsls: Move all default configuration to workspace_configuration (#18259) This fixes https://github.com/zed-industries/zed/issues/18014 by fixing the regression that was introduced in https://github.com/zed-industries/zed/pull/17757. In short: after digging into the `vtsls` code, it looks like it essentially doesn't need any `initialization_options`, it's all workspace configuration, since it tries to use the built-in settings from VS Code. I tested the completions, the inlay hints, the max memory - all of it now works after moving to `workspace_configuration`. Closes #18014. Release Notes: - Fixed `vtsls` being initialized the wrong way, which would mean the wrong options were used to enable completions or inlay hints. 
--- crates/languages/src/vtsls.rs | 43 ++++++--------- docs/src/languages/typescript.md | 93 +++++++++++++++++++++++++++----- 2 files changed, 96 insertions(+), 40 deletions(-) diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 3c1cf0fcbe151b..de6d575a8ee9fd 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -6,14 +6,14 @@ use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; -use serde_json::{json, Value}; +use serde_json::Value; use std::{ any::Any, ffi::OsString, path::{Path, PathBuf}, sync::Arc, }; -use util::{maybe, ResultExt}; +use util::{maybe, merge_json_value_into, ResultExt}; fn typescript_server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -212,11 +212,12 @@ impl LspAdapter for VtslsLspAdapter { }) } - async fn initialization_options( + async fn workspace_configuration( self: Arc, - adapter: &Arc, - ) -> Result> { - let tsdk_path = Self::tsdk_path(adapter).await; + delegate: &Arc, + cx: &mut AsyncAppContext, + ) -> Result { + let tsdk_path = Self::tsdk_path(delegate).await; let config = serde_json::json!({ "tsdk": tsdk_path, "suggest": { @@ -243,10 +244,13 @@ impl LspAdapter for VtslsLspAdapter { "enumMemberValues": { "enabled": true } - } + }, + "tsserver": { + "maxTsServerMemory": 8092 + }, }); - Ok(Some(json!({ + let mut default_workspace_configuration = serde_json::json!({ "typescript": config, "javascript": config, "vtsls": { @@ -258,33 +262,18 @@ impl LspAdapter for VtslsLspAdapter { }, "autoUseWorkspaceTsdk": true } - }))) - } + }); - async fn workspace_configuration( - self: Arc, - delegate: &Arc, - cx: &mut AsyncAppContext, - ) -> Result { let override_options = cx.update(|cx| { language_server_settings(delegate.as_ref(), &SERVER_NAME, cx) .and_then(|s| 
s.settings.clone()) })?; - if let Some(options) = override_options { - return Ok(options); + if let Some(override_options) = override_options { + merge_json_value_into(override_options, &mut default_workspace_configuration) } - let config = serde_json::json!({ - "tsserver": { - "maxTsServerMemory": 8092 - }, - }); - - Ok(serde_json::json!({ - "typescript": config, - "javascript": config - })) + Ok(default_workspace_configuration) } fn language_ids(&self) -> HashMap { diff --git a/docs/src/languages/typescript.md b/docs/src/languages/typescript.md index 080d41efb33c28..fa9827cb06f89e 100644 --- a/docs/src/languages/typescript.md +++ b/docs/src/languages/typescript.md @@ -68,21 +68,25 @@ Prettier will also be used for TypeScript files by default. To disable this: Zed sets the following initialization options to make the language server send back inlay hints (that is, when Zed has inlay hints enabled in the settings). -You can override these settings in your configuration file: +You can override these settings in your Zed settings file. 
+ +When using `typescript-language-server`: ```json -"lsp": { - "$LANGUAGE_SERVER_NAME": { - "initialization_options": { - "preferences": { - "includeInlayParameterNameHints": "all", - "includeInlayParameterNameHintsWhenArgumentMatchesName": true, - "includeInlayFunctionParameterTypeHints": true, - "includeInlayVariableTypeHints": true, - "includeInlayVariableTypeHintsWhenTypeMatchesName": true, - "includeInlayPropertyDeclarationTypeHints": true, - "includeInlayFunctionLikeReturnTypeHints": true, - "includeInlayEnumMemberValueHints": true, +{ + "lsp": { + "typescript-language-server": { + "initialization_options": { + "preferences": { + "includeInlayParameterNameHints": "all", + "includeInlayParameterNameHintsWhenArgumentMatchesName": true, + "includeInlayFunctionParameterTypeHints": true, + "includeInlayVariableTypeHints": true, + "includeInlayVariableTypeHintsWhenTypeMatchesName": true, + "includeInlayPropertyDeclarationTypeHints": true, + "includeInlayFunctionLikeReturnTypeHints": true, + "includeInlayEnumMemberValueHints": true + } } } } @@ -91,6 +95,69 @@ You can override these settings in your configuration file: See [typescript-language-server inlayhints documentation](https://github.com/typescript-language-server/typescript-language-server?tab=readme-ov-file#inlay-hints-textdocumentinlayhint) for more information. 
+When using `vtsls`: + +```json +{ + "lsp": { + "vtsls": { + "settings": { + // For JavaScript: + "javascript": { + "inlayHints": { + "parameterNames": { + "enabled": "all", + "suppressWhenArgumentMatchesName": false + }, + "parameterTypes": { + "enabled": true + }, + "variableTypes": { + "enabled": true, + "suppressWhenTypeMatchesName": true + }, + "propertyDeclarationTypes": { + "enabled": true + }, + "functionLikeReturnTypes": { + "enabled": true + }, + "enumMemberValues": { + "enabled": true + } + } + }, + // For TypeScript: + "typescript": { + "inlayHints": { + "parameterNames": { + "enabled": "all", + "suppressWhenArgumentMatchesName": false + }, + "parameterTypes": { + "enabled": true + }, + "variableTypes": { + "enabled": true, + "suppressWhenTypeMatchesName": true + }, + "propertyDeclarationTypes": { + "enabled": true + }, + "functionLikeReturnTypes": { + "enabled": true + }, + "enumMemberValues": { + "enabled": true + } + } + } + } + } + } +} +``` + ## See also - [Zed Yarn documentation](./yarn.md) for a walkthrough of configuring your project to use Yarn. From 399e094f021561a51e3e2ff76993bfb0cef0a5c2 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 24 Sep 2024 15:36:05 +0300 Subject: [PATCH 287/762] Properly use default search options in the buffer search bar (#18271) Only replace current search options if the search was dismissed and the new options are different from the default ones. 
Follow-up of https://github.com/zed-industries/zed/pull/17179 Closes https://github.com/zed-industries/zed/issues/18166 Release Notes: - Fixed buffer search options toggling unexpectedly on redeploys ([#18166](https://github.com/zed-industries/zed/issues/18166)) --- crates/search/src/buffer_search.rs | 132 +++++++++++++++++++++++++++-- 1 file changed, 126 insertions(+), 6 deletions(-) diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 3a7cccfbb916c9..1c37bfd481dcb9 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -87,6 +87,7 @@ pub struct BufferSearchBar { pending_search: Option>, search_options: SearchOptions, default_options: SearchOptions, + configured_options: SearchOptions, query_contains_error: bool, dismissed: bool, search_history: SearchHistory, @@ -517,6 +518,7 @@ impl BufferSearchBar { active_match_index: None, searchable_items_with_matches: Default::default(), default_options: search_options, + configured_options: search_options, search_options, pending_search: None, query_contains_error: false, @@ -605,10 +607,11 @@ impl BufferSearchBar { return false; }; - self.default_options = SearchOptions::from_settings(&EditorSettings::get_global(cx).search); - - if self.default_options != self.search_options { - self.search_options = self.default_options; + self.configured_options = + SearchOptions::from_settings(&EditorSettings::get_global(cx).search); + if self.dismissed && self.configured_options != self.default_options { + self.search_options = self.configured_options; + self.default_options = self.configured_options; } self.dismissed = false; @@ -627,6 +630,7 @@ impl BufferSearchBar { .map(SearchableItemHandle::supported_options) .unwrap_or_default() } + pub fn search_suggested(&mut self, cx: &mut ViewContext) { let search = self .query_suggestion(cx) @@ -1195,10 +1199,11 @@ mod tests { use std::ops::Range; use super::*; - use editor::{display_map::DisplayRow, DisplayPoint, 
Editor, MultiBuffer}; - use gpui::{Context, Hsla, TestAppContext, VisualTestContext}; + use editor::{display_map::DisplayRow, DisplayPoint, Editor, MultiBuffer, SearchSettings}; + use gpui::{Context, Hsla, TestAppContext, UpdateGlobal, VisualTestContext}; use language::{Buffer, Point}; use project::Project; + use settings::SettingsStore; use smol::stream::StreamExt as _; use unindent::Unindent as _; @@ -2320,4 +2325,119 @@ mod tests { assert!(display_points_of(editor.all_text_background_highlights(cx)).is_empty(),); }); } + + #[gpui::test] + async fn test_search_options_changes(cx: &mut TestAppContext) { + let (_editor, search_bar, cx) = init_test(cx); + update_search_settings( + SearchSettings { + whole_word: false, + case_sensitive: false, + include_ignored: false, + regex: false, + }, + cx, + ); + + let deploy = Deploy { + focus: true, + replace_enabled: false, + selection_search_enabled: true, + }; + + search_bar.update(cx, |search_bar, cx| { + assert_eq!( + search_bar.search_options, + SearchOptions::NONE, + "Should have no search options enabled by default" + ); + search_bar.toggle_search_option(SearchOptions::WHOLE_WORD, cx); + assert_eq!( + search_bar.search_options, + SearchOptions::WHOLE_WORD, + "Should enable the option toggled" + ); + assert!( + !search_bar.dismissed, + "Search bar should be present and visible" + ); + search_bar.deploy(&deploy, cx); + assert_eq!( + search_bar.configured_options, + SearchOptions::NONE, + "Should have configured search options matching the settings" + ); + assert_eq!( + search_bar.search_options, + SearchOptions::WHOLE_WORD, + "After (re)deploying, the option should still be enabled" + ); + + search_bar.dismiss(&Dismiss, cx); + search_bar.deploy(&deploy, cx); + assert_eq!( + search_bar.search_options, + SearchOptions::NONE, + "After hiding and showing the search bar, default options should be used" + ); + + search_bar.toggle_search_option(SearchOptions::REGEX, cx); + 
search_bar.toggle_search_option(SearchOptions::WHOLE_WORD, cx); + assert_eq!( + search_bar.search_options, + SearchOptions::REGEX | SearchOptions::WHOLE_WORD, + "Should enable the options toggled" + ); + assert!( + !search_bar.dismissed, + "Search bar should be present and visible" + ); + }); + + update_search_settings( + SearchSettings { + whole_word: false, + case_sensitive: true, + include_ignored: false, + regex: false, + }, + cx, + ); + search_bar.update(cx, |search_bar, cx| { + assert_eq!( + search_bar.search_options, + SearchOptions::REGEX | SearchOptions::WHOLE_WORD, + "Should have no search options enabled by default" + ); + + search_bar.deploy(&deploy, cx); + assert_eq!( + search_bar.configured_options, + SearchOptions::CASE_SENSITIVE, + "Should have configured search options matching the settings" + ); + assert_eq!( + search_bar.search_options, + SearchOptions::REGEX | SearchOptions::WHOLE_WORD, + "Toggling a non-dismissed search bar with custom options should not change the default options" + ); + search_bar.dismiss(&Dismiss, cx); + search_bar.deploy(&deploy, cx); + assert_eq!( + search_bar.search_options, + SearchOptions::CASE_SENSITIVE, + "After hiding and showing the search bar, default options should be used" + ); + }); + } + + fn update_search_settings(search_settings: SearchSettings, cx: &mut TestAppContext) { + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings::(cx, |settings| { + settings.search = Some(search_settings); + }); + }); + }); + } } From f019ad563f643a03e83083f04f23d38d62dd1181 Mon Sep 17 00:00:00 2001 From: Boris Verkhovskiy Date: Tue, 24 Sep 2024 06:46:06 -0600 Subject: [PATCH 288/762] Don't highlight Python function arguments as variables (#18252) Works on - #14892 Follow up to - #17473 - https://github.com/zed-industries/zed/pull/17984#issuecomment-2369815207 Release Notes: - N/A --- crates/languages/src/python/highlights.scm | 1 - 1 file changed, 1 deletion(-) diff --git 
a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index 3255677bedc428..5edbefa7be747d 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -1,4 +1,3 @@ -(parameter (identifier) @variable) (attribute attribute: (identifier) @property) (type (identifier) @type) From 93a4295f66c5a4c393e861deac7dcfb8c8dd45d2 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 24 Sep 2024 15:03:22 +0200 Subject: [PATCH 289/762] project search: Fix search results not being highlighted (#18273) Closes #18254 Closes #18219 Closes #17690 This fixes the project search not highlighting all results. The problem was relatively simple, even though it took a while to find it: we inserted multiple excerpts concurrently and the order in the multi-buffer ended up being wrong. Sorting the resulting `match_ranges` fixed the problem, but as it turns out, we can do a better job by moving the concurrency into the method on the MultiBuffer. Performance is the same, but now the problem is fixed. 
Release Notes: - Fixed search results in project-wide search not being highlighted consistently and navigation sometimes being broken (#18254, #18219, #17690) --------- Co-authored-by: Bennet --- crates/multi_buffer/src/multi_buffer.rs | 220 +++++++++++++++--------- crates/search/src/project_search.rs | 73 +++----- 2 files changed, 163 insertions(+), 130 deletions(-) diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 0df196bb9829dc..828b39967d9e1f 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5,7 +5,7 @@ use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; use futures::{channel::mpsc, SinkExt}; -use gpui::{AppContext, EntityId, EventEmitter, Model, ModelContext}; +use gpui::{AppContext, EntityId, EventEmitter, Model, ModelContext, Task}; use itertools::Itertools; use language::{ language_settings::{language_settings, LanguageSettings}, @@ -1130,66 +1130,6 @@ impl MultiBuffer { } } - pub fn stream_excerpts_with_context_lines( - &mut self, - buffer: Model, - ranges: Vec>, - context_line_count: u32, - cx: &mut ModelContext, - ) -> mpsc::Receiver> { - let (buffer_id, buffer_snapshot) = - buffer.update(cx, |buffer, _| (buffer.remote_id(), buffer.snapshot())); - - let (mut tx, rx) = mpsc::channel(256); - cx.spawn(move |this, mut cx| async move { - let mut excerpt_ranges = Vec::new(); - let mut range_counts = Vec::new(); - cx.background_executor() - .scoped(|scope| { - scope.spawn(async { - let (ranges, counts) = - build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count); - excerpt_ranges = ranges; - range_counts = counts; - }); - }) - .await; - - let mut ranges = ranges.into_iter(); - let mut range_counts = range_counts.into_iter(); - for excerpt_ranges in excerpt_ranges.chunks(100) { - let excerpt_ids = match this.update(&mut cx, |this, cx| { - this.push_excerpts(buffer.clone(), 
excerpt_ranges.iter().cloned(), cx) - }) { - Ok(excerpt_ids) => excerpt_ids, - Err(_) => return, - }; - - for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.by_ref()) - { - for range in ranges.by_ref().take(range_count) { - let start = Anchor { - buffer_id: Some(buffer_id), - excerpt_id, - text_anchor: range.start, - }; - let end = Anchor { - buffer_id: Some(buffer_id), - excerpt_id, - text_anchor: range.end, - }; - if tx.send(start..end).await.is_err() { - break; - } - } - } - } - }) - .detach(); - - rx - } - pub fn push_excerpts( &mut self, buffer: Model, @@ -1239,6 +1179,91 @@ impl MultiBuffer { anchor_ranges } + pub fn push_multiple_excerpts_with_context_lines( + &mut self, + buffers_with_ranges: Vec<(Model, Vec>)>, + context_line_count: u32, + cx: &mut ModelContext, + ) -> Task>> { + use futures::StreamExt; + + let (excerpt_ranges_tx, mut excerpt_ranges_rx) = mpsc::channel(256); + + let mut buffer_ids = Vec::with_capacity(buffers_with_ranges.len()); + + for (buffer, ranges) in buffers_with_ranges { + let (buffer_id, buffer_snapshot) = + buffer.update(cx, |buffer, _| (buffer.remote_id(), buffer.snapshot())); + + buffer_ids.push(buffer_id); + + cx.background_executor() + .spawn({ + let mut excerpt_ranges_tx = excerpt_ranges_tx.clone(); + + async move { + let (excerpt_ranges, counts) = + build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count); + excerpt_ranges_tx + .send((buffer_id, buffer.clone(), ranges, excerpt_ranges, counts)) + .await + .ok(); + } + }) + .detach() + } + + cx.spawn(move |this, mut cx| async move { + let mut results_by_buffer_id = HashMap::default(); + while let Some((buffer_id, buffer, ranges, excerpt_ranges, range_counts)) = + excerpt_ranges_rx.next().await + { + results_by_buffer_id + .insert(buffer_id, (buffer, ranges, excerpt_ranges, range_counts)); + } + + let mut multi_buffer_ranges = Vec::default(); + 'outer: for buffer_id in buffer_ids { + let Some((buffer, ranges, excerpt_ranges, range_counts)) = + 
results_by_buffer_id.remove(&buffer_id) + else { + continue; + }; + + let mut ranges = ranges.into_iter(); + let mut range_counts = range_counts.into_iter(); + for excerpt_ranges in excerpt_ranges.chunks(100) { + let excerpt_ids = match this.update(&mut cx, |this, cx| { + this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx) + }) { + Ok(excerpt_ids) => excerpt_ids, + Err(_) => continue 'outer, + }; + + for (excerpt_id, range_count) in + excerpt_ids.into_iter().zip(range_counts.by_ref()) + { + for range in ranges.by_ref().take(range_count) { + let start = Anchor { + buffer_id: Some(buffer_id), + excerpt_id, + text_anchor: range.start, + }; + let end = Anchor { + buffer_id: Some(buffer_id), + excerpt_id, + text_anchor: range.end, + }; + multi_buffer_ranges.push(start..end); + } + } + } + } + + multi_buffer_ranges + }) + } + pub fn insert_excerpts_after( &mut self, prev_excerpt_id: ExcerptId, @@ -5052,7 +5077,6 @@ where #[cfg(test)] mod tests { use super::*; - use futures::StreamExt; use gpui::{AppContext, Context, TestAppContext}; use language::{Buffer, Rope}; use parking_lot::RwLock; @@ -5601,41 +5625,67 @@ mod tests { ); } - #[gpui::test] - async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) { - let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); - let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { - let snapshot = buffer.read(cx); - let ranges = vec![ - snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)), - snapshot.anchor_before(Point::new(7, 1))..snapshot.anchor_before(Point::new(7, 3)), - snapshot.anchor_before(Point::new(15, 0)) - ..snapshot.anchor_before(Point::new(15, 0)), - ]; - multibuffer.stream_excerpts_with_context_lines(buffer.clone(), ranges, 2, cx) - }); + #[gpui::test(iterations = 100)] + async fn test_push_multiple_excerpts_with_context_lines(cx: &mut TestAppContext) 
{ + let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); + let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(15, 4, 'a'), cx)); + let snapshot_1 = buffer_1.update(cx, |buffer, _| buffer.snapshot()); + let snapshot_2 = buffer_2.update(cx, |buffer, _| buffer.snapshot()); + let ranges_1 = vec![ + snapshot_1.anchor_before(Point::new(3, 2))..snapshot_1.anchor_before(Point::new(4, 2)), + snapshot_1.anchor_before(Point::new(7, 1))..snapshot_1.anchor_before(Point::new(7, 3)), + snapshot_1.anchor_before(Point::new(15, 0)) + ..snapshot_1.anchor_before(Point::new(15, 0)), + ]; + let ranges_2 = vec![ + snapshot_2.anchor_before(Point::new(2, 1))..snapshot_2.anchor_before(Point::new(3, 1)), + snapshot_2.anchor_before(Point::new(10, 0)) + ..snapshot_2.anchor_before(Point::new(10, 2)), + ]; - let anchor_ranges = anchor_ranges.collect::>().await; + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let anchor_ranges = multibuffer + .update(cx, |multibuffer, cx| { + multibuffer.push_multiple_excerpts_with_context_lines( + vec![(buffer_1.clone(), ranges_1), (buffer_2.clone(), ranges_2)], + 2, + cx, + ) + }) + .await; let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); assert_eq!( snapshot.text(), concat!( - "bbb\n", // + "bbb\n", // buffer_1 "ccc\n", // - "ddd\n", // - "eee\n", // + "ddd\n", // <-- excerpt 1 + "eee\n", // <-- excerpt 1 "fff\n", // "ggg\n", // - "hhh\n", // + "hhh\n", // <-- excerpt 2 "iii\n", // "jjj\n", // + // "nnn\n", // "ooo\n", // - "ppp\n", // + "ppp\n", // <-- excerpt 3 "qqq\n", // - "rrr", // + "rrr\n", // + // + "aaaa\n", // buffer 2 + "bbbb\n", // + "cccc\n", // <-- excerpt 4 + "dddd\n", // <-- excerpt 4 + "eeee\n", // + "ffff\n", // + // + "iiii\n", // + "jjjj\n", // + "kkkk\n", // <-- excerpt 5 + "llll\n", // + "mmmm", // ) ); @@ -5647,7 +5697,9 @@ mod tests { vec![ Point::new(2, 2)..Point::new(3, 2), Point::new(6, 1)..Point::new(6, 3), - Point::new(11, 
0)..Point::new(11, 0) + Point::new(11, 0)..Point::new(11, 0), + Point::new(16, 1)..Point::new(17, 1), + Point::new(22, 0)..Point::new(22, 2) ] ); } diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index d5b719a657628e..ea94d27daf61d5 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -263,54 +263,35 @@ impl ProjectSearch { let mut limit_reached = false; while let Some(results) = matches.next().await { - let tasks = results - .into_iter() - .map(|result| { - let this = this.clone(); - - cx.spawn(|mut cx| async move { - match result { - project::search::SearchResult::Buffer { buffer, ranges } => { - let mut match_ranges_rx = - this.update(&mut cx, |this, cx| { - this.excerpts.update(cx, |excerpts, cx| { - excerpts.stream_excerpts_with_context_lines( - buffer, - ranges, - editor::DEFAULT_MULTIBUFFER_CONTEXT, - cx, - ) - }) - })?; - - let mut match_ranges = vec![]; - while let Some(range) = match_ranges_rx.next().await { - match_ranges.push(range); - } - anyhow::Ok((match_ranges, false)) - } - project::search::SearchResult::LimitReached => { - anyhow::Ok((vec![], true)) - } - } - }) - }) - .collect::>(); - - let result_ranges = futures::future::join_all(tasks).await; - let mut combined_ranges = vec![]; - for (ranges, result_limit_reached) in result_ranges.into_iter().flatten() { - combined_ranges.extend(ranges); - if result_limit_reached { - limit_reached = result_limit_reached; + let mut buffers_with_ranges = Vec::with_capacity(results.len()); + for result in results { + match result { + project::search::SearchResult::Buffer { buffer, ranges } => { + buffers_with_ranges.push((buffer, ranges)); + } + project::search::SearchResult::LimitReached => { + limit_reached = true; + } } } + + let match_ranges = this + .update(&mut cx, |this, cx| { + this.excerpts.update(cx, |excerpts, cx| { + excerpts.push_multiple_excerpts_with_context_lines( + buffers_with_ranges, + 
editor::DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ) + }) + }) + .ok()? + .await; + this.update(&mut cx, |this, cx| { - if !combined_ranges.is_empty() { - this.no_results = Some(false); - this.match_ranges.extend(combined_ranges); - cx.notify(); - } + this.no_results = Some(false); + this.match_ranges.extend(match_ranges); + cx.notify(); }) .ok()?; } @@ -2745,7 +2726,7 @@ pub mod tests { search_view .results_editor .update(cx, |editor, cx| editor.display_text(cx)), - "\n\n\nconst TWO: usize = one::ONE + one::ONE;\n\n\n\n\nconst ONE: usize = 1;\n", + "\n\n\nconst ONE: usize = 1;\n\n\n\n\nconst TWO: usize = one::ONE + one::ONE;\n", "New search in directory should have a filter that matches a certain directory" ); }) From 336b4a5690cee0714ae704348cb4944544fcdc99 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 24 Sep 2024 09:15:25 -0400 Subject: [PATCH 290/762] Tweak close stale issues configuration (#18275) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index afc28ec180b15e..be69ee2682cfc6 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -18,10 +18,14 @@ jobs: Thanks for your help! close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" + # We will increase `days-before-stale` to 365 on or after Jan 24th, + # 2024. This date marks one year since migrating issues from + # 'community' to 'zed' repository. The migration added activity to all + # issues, preventing 365 days from working until then. 
days-before-stale: 180 days-before-close: 7 any-of-issue-labels: "defect,panic / crash" - operations-per-run: 100 + operations-per-run: 200 ascending: true enable-statistics: true stale-issue-label: "stale" From 3a2f0653d16bdbacf2e090c15bcf424d96de9e64 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 24 Sep 2024 09:44:27 -0400 Subject: [PATCH 291/762] Bump operations-per-run parameter in stale issues action (#18276) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index be69ee2682cfc6..1f287fb5e3d4b9 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -25,7 +25,7 @@ jobs: days-before-stale: 180 days-before-close: 7 any-of-issue-labels: "defect,panic / crash" - operations-per-run: 200 + operations-per-run: 1000 ascending: true enable-statistics: true stale-issue-label: "stale" From 437bcc0ce6a270487fe6b2e2c42117433abe0946 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 24 Sep 2024 16:46:11 +0200 Subject: [PATCH 292/762] ssh project: Handle multiple paths and worktrees correctly (#18277) This makes SSH projects work with `ssh_connections` that have multiple paths: ```json { "ssh_connections": [ { "host": "127.0.0.1", "projects": [ { "paths": [ "/Users/thorstenball/work/projs/go-proj", "/Users/thorstenball/work/projs/rust-proj" ] } ] } ] } ``` @ConradIrwin @mikayla-maki since this wasn't really released yet, we didn't create a full-on migration, so old ssh projects that were already serialized need to either be manually deleted from the database, or the whole local DB wiped. 
Release Notes: - N/A --------- Co-authored-by: Bennet --- crates/recent_projects/src/recent_projects.rs | 8 +-- crates/workspace/src/persistence.rs | 64 ++++++++++--------- crates/workspace/src/persistence/model.rs | 42 +++++++----- crates/workspace/src/workspace.rs | 8 +-- 4 files changed, 66 insertions(+), 56 deletions(-) diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index cb3d3ab65950f0..20393d63e1a3df 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -268,7 +268,7 @@ impl PickerDelegate for RecentProjectsDelegate { .as_ref() .map(|port| port.to_string()) .unwrap_or_default(), - ssh_project.path, + ssh_project.paths.join(","), ssh_project .user .as_ref() @@ -403,7 +403,7 @@ impl PickerDelegate for RecentProjectsDelegate { password: None, }; - let paths = vec![PathBuf::from(ssh_project.path.clone())]; + let paths = ssh_project.paths.iter().map(PathBuf::from).collect(); cx.spawn(|_, mut cx| async move { open_ssh_project(connection_options, paths, app_state, open_options, &mut cx).await @@ -460,9 +460,7 @@ impl PickerDelegate for RecentProjectsDelegate { .filter_map(|i| paths.paths().get(*i).cloned()) .collect(), ), - SerializedWorkspaceLocation::Ssh(ssh_project) => { - Arc::new(vec![PathBuf::from(ssh_project.ssh_url())]) - } + SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()), SerializedWorkspaceLocation::DevServer(dev_server_project) => { Arc::new(vec![PathBuf::from(format!( "{}:{}", diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 034328a30b2ffa..3a0b8eabb90e6b 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -366,6 +366,9 @@ define_connection! 
{ ); ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE; ), + sql!( + ALTER TABLE ssh_projects RENAME COLUMN path TO paths; + ), ]; } @@ -769,39 +772,40 @@ impl WorkspaceDb { &self, host: String, port: Option, - path: String, + paths: Vec, user: Option, ) -> Result { + let paths = serde_json::to_string(&paths)?; if let Some(project) = self - .get_ssh_project(host.clone(), port, path.clone(), user.clone()) + .get_ssh_project(host.clone(), port, paths.clone(), user.clone()) .await? { Ok(project) } else { - self.insert_ssh_project(host, port, path, user) + self.insert_ssh_project(host, port, paths, user) .await? .ok_or_else(|| anyhow!("failed to insert ssh project")) } } query! { - async fn get_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { - SELECT id, host, port, path, user + async fn get_ssh_project(host: String, port: Option, paths: String, user: Option) -> Result> { + SELECT id, host, port, paths, user FROM ssh_projects - WHERE host IS ? AND port IS ? AND path IS ? AND user IS ? + WHERE host IS ? AND port IS ? AND paths IS ? AND user IS ? LIMIT 1 } } query! { - async fn insert_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { + async fn insert_ssh_project(host: String, port: Option, paths: String, user: Option) -> Result> { INSERT INTO ssh_projects( host, port, - path, + paths, user ) VALUES (?1, ?2, ?3, ?4) - RETURNING id, host, port, path, user + RETURNING id, host, port, paths, user } } @@ -840,7 +844,7 @@ impl WorkspaceDb { query! 
{ fn ssh_projects() -> Result> { - SELECT id, host, port, path, user + SELECT id, host, port, paths, user FROM ssh_projects } } @@ -1656,45 +1660,45 @@ mod tests { async fn test_get_or_create_ssh_project() { let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project").await); - let (host, port, path, user) = ( + let (host, port, paths, user) = ( "example.com".to_string(), Some(22_u16), - "/home/user".to_string(), + vec!["/home/user".to_string(), "/etc/nginx".to_string()], Some("user".to_string()), ); let project = db - .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .get_or_create_ssh_project(host.clone(), port, paths.clone(), user.clone()) .await .unwrap(); assert_eq!(project.host, host); - assert_eq!(project.path, path); + assert_eq!(project.paths, paths); assert_eq!(project.user, user); // Test that calling the function again with the same parameters returns the same project let same_project = db - .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .get_or_create_ssh_project(host.clone(), port, paths.clone(), user.clone()) .await .unwrap(); assert_eq!(project.id, same_project.id); // Test with different parameters - let (host2, path2, user2) = ( + let (host2, paths2, user2) = ( "otherexample.com".to_string(), - "/home/otheruser".to_string(), + vec!["/home/otheruser".to_string()], Some("otheruser".to_string()), ); let different_project = db - .get_or_create_ssh_project(host2.clone(), None, path2.clone(), user2.clone()) + .get_or_create_ssh_project(host2.clone(), None, paths2.clone(), user2.clone()) .await .unwrap(); assert_ne!(project.id, different_project.id); assert_eq!(different_project.host, host2); - assert_eq!(different_project.path, path2); + assert_eq!(different_project.paths, paths2); assert_eq!(different_project.user, user2); } @@ -1702,25 +1706,25 @@ mod tests { async fn test_get_or_create_ssh_project_with_null_user() { let db = 
WorkspaceDb(open_test_db("test_get_or_create_ssh_project_with_null_user").await); - let (host, port, path, user) = ( + let (host, port, paths, user) = ( "example.com".to_string(), None, - "/home/user".to_string(), + vec!["/home/user".to_string()], None, ); let project = db - .get_or_create_ssh_project(host.clone(), port, path.clone(), None) + .get_or_create_ssh_project(host.clone(), port, paths.clone(), None) .await .unwrap(); assert_eq!(project.host, host); - assert_eq!(project.path, path); + assert_eq!(project.paths, paths); assert_eq!(project.user, None); // Test that calling the function again with the same parameters returns the same project let same_project = db - .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .get_or_create_ssh_project(host.clone(), port, paths.clone(), user.clone()) .await .unwrap(); @@ -1735,32 +1739,32 @@ mod tests { ( "example.com".to_string(), None, - "/home/user".to_string(), + vec!["/home/user".to_string()], None, ), ( "anotherexample.com".to_string(), Some(123_u16), - "/home/user2".to_string(), + vec!["/home/user2".to_string()], Some("user2".to_string()), ), ( "yetanother.com".to_string(), Some(345_u16), - "/home/user3".to_string(), + vec!["/home/user3".to_string(), "/proc/1234/exe".to_string()], None, ), ]; - for (host, port, path, user) in projects.iter() { + for (host, port, paths, user) in projects.iter() { let project = db - .get_or_create_ssh_project(host.clone(), *port, path.clone(), user.clone()) + .get_or_create_ssh_project(host.clone(), *port, paths.clone(), user.clone()) .await .unwrap(); assert_eq!(&project.host, host); assert_eq!(&project.port, port); - assert_eq!(&project.path, path); + assert_eq!(&project.paths, paths); assert_eq!(&project.user, user); } diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 0ad3fa5e606e5b..7528e4c3934c57 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs 
@@ -26,24 +26,29 @@ pub struct SerializedSshProject { pub id: SshProjectId, pub host: String, pub port: Option, - pub path: String, + pub paths: Vec, pub user: Option, } impl SerializedSshProject { - pub fn ssh_url(&self) -> String { - let mut result = String::from("ssh://"); - if let Some(user) = &self.user { - result.push_str(user); - result.push('@'); - } - result.push_str(&self.host); - if let Some(port) = &self.port { - result.push(':'); - result.push_str(&port.to_string()); - } - result.push_str(&self.path); - result + pub fn ssh_urls(&self) -> Vec { + self.paths + .iter() + .map(|path| { + let mut result = String::new(); + if let Some(user) = &self.user { + result.push_str(user); + result.push('@'); + } + result.push_str(&self.host); + if let Some(port) = &self.port { + result.push(':'); + result.push_str(&port.to_string()); + } + result.push_str(path); + PathBuf::from(result) + }) + .collect() } } @@ -58,7 +63,8 @@ impl Bind for &SerializedSshProject { let next_index = statement.bind(&self.id.0, start_index)?; let next_index = statement.bind(&self.host, next_index)?; let next_index = statement.bind(&self.port, next_index)?; - let next_index = statement.bind(&self.path, next_index)?; + let raw_paths = serde_json::to_string(&self.paths)?; + let next_index = statement.bind(&raw_paths, next_index)?; statement.bind(&self.user, next_index) } } @@ -68,7 +74,9 @@ impl Column for SerializedSshProject { let id = statement.column_int64(start_index)?; let host = statement.column_text(start_index + 1)?.to_string(); let (port, _) = Option::::column(statement, start_index + 2)?; - let path = statement.column_text(start_index + 3)?.to_string(); + let raw_paths = statement.column_text(start_index + 3)?.to_string(); + let paths: Vec = serde_json::from_str(&raw_paths)?; + let (user, _) = Option::::column(statement, start_index + 4)?; Ok(( @@ -76,7 +84,7 @@ impl Column for SerializedSshProject { id: SshProjectId(id as u64), host, port, - path, + paths, user, }, start_index + 
5, diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b732eb5bc70c1b..98f793c234aaeb 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5516,14 +5516,14 @@ pub fn open_ssh_project( cx: &mut AppContext, ) -> Task> { cx.spawn(|mut cx| async move { - // TODO: Handle multiple paths - let path = paths.iter().next().cloned().unwrap_or_default(); - let serialized_ssh_project = persistence::DB .get_or_create_ssh_project( connection_options.host.clone(), connection_options.port, - path.to_string_lossy().to_string(), + paths + .iter() + .map(|path| path.to_string_lossy().to_string()) + .collect::>(), connection_options.username.clone(), ) .await?; From e87d6da2a67c46839c32512c1287ed7a435e2d0d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastijan=20Kelneri=C4=8D?= Date: Tue, 24 Sep 2024 16:49:07 +0200 Subject: [PATCH 293/762] Implement grapheme support for supermaven completions (#18279) Closes [#18278](https://github.com/zed-industries/zed/issues/18278) Release Notes: - Fixed a panic when graphemes are included in supermaven completions --- Cargo.lock | 1 + crates/supermaven/Cargo.toml | 1 + .../src/supermaven_completion_provider.rs | 22 ++++++++++--------- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 09a68973340a57..9c6d2fb7b9b842 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11006,6 +11006,7 @@ dependencies = [ "text", "theme", "ui", + "unicode-segmentation", "util", "windows 0.58.0", ] diff --git a/crates/supermaven/Cargo.toml b/crates/supermaven/Cargo.toml index b8f85c0f056184..e04d0ef51bbf83 100644 --- a/crates/supermaven/Cargo.toml +++ b/crates/supermaven/Cargo.toml @@ -29,6 +29,7 @@ supermaven_api.workspace = true smol.workspace = true text.workspace = true ui.workspace = true +unicode-segmentation.workspace = true util.workspace = true [target.'cfg(target_os = "windows")'.dependencies] diff --git 
a/crates/supermaven/src/supermaven_completion_provider.rs b/crates/supermaven/src/supermaven_completion_provider.rs index 261ce372d9f717..2a7fc31c0db8fe 100644 --- a/crates/supermaven/src/supermaven_completion_provider.rs +++ b/crates/supermaven/src/supermaven_completion_provider.rs @@ -12,6 +12,7 @@ use std::{ time::Duration, }; use text::{ToOffset, ToPoint}; +use unicode_segmentation::UnicodeSegmentation; pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); @@ -54,33 +55,34 @@ fn completion_state_from_diff( ) -> CompletionProposal { let buffer_text = snapshot .text_for_range(delete_range.clone()) - .collect::() - .chars() - .collect::>(); + .collect::(); let mut inlays: Vec = Vec::new(); - let completion = completion_text.chars().collect::>(); + let completion_graphemes: Vec<&str> = completion_text.graphemes(true).collect(); + let buffer_graphemes: Vec<&str> = buffer_text.graphemes(true).collect(); let mut offset = position.to_offset(&snapshot); let mut i = 0; let mut j = 0; - while i < completion.len() && j < buffer_text.len() { + while i < completion_graphemes.len() && j < buffer_graphemes.len() { // find the next instance of the buffer text in the completion text. - let k = completion[i..].iter().position(|c| *c == buffer_text[j]); + let k = completion_graphemes[i..] + .iter() + .position(|c| *c == buffer_graphemes[j]); match k { Some(k) => { if k != 0 { // the range from the current position to item is an inlay. 
inlays.push(InlayProposal::Suggestion( snapshot.anchor_after(offset), - completion_text[i..i + k].into(), + completion_graphemes[i..i + k].join("").into(), )); } i += k + 1; j += 1; - offset.add_assign(1); + offset.add_assign(buffer_graphemes[j - 1].len()); } None => { // there are no more matching completions, so drop the remaining @@ -90,11 +92,11 @@ fn completion_state_from_diff( } } - if j == buffer_text.len() && i < completion.len() { + if j == buffer_graphemes.len() && i < completion_graphemes.len() { // there is leftover completion text, so drop it as an inlay. inlays.push(InlayProposal::Suggestion( snapshot.anchor_after(offset), - completion_text[i..completion_text.len()].into(), + completion_graphemes[i..].join("").into(), )); } From 2470db490115322720f85e2ad57e922de930ee1b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 24 Sep 2024 18:21:26 +0300 Subject: [PATCH 294/762] Reuse buffer search queries on tab switch (#18281) Before this change, with a large chunk of text as a search query (N*10^5 in my experiments) and the buffer search bar visible, switching between editor tabs was very slow, even if the editors were N*10^2 lines long. The slow switch was caused by Zed always re-creating the Aho-Corasick queries, which is now reused. 
Release Notes: - Improved buffer search performance when switching tabs Co-authored-by: Piotr Osiewicz --- crates/search/src/buffer_search.rs | 98 +++++++++++++++++------------- 1 file changed, 55 insertions(+), 43 deletions(-) diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 1c37bfd481dcb9..9ba7dfd7965b15 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -440,7 +440,7 @@ impl ToolbarItemView for BufferSearchBar { )); self.active_searchable_item = Some(searchable_item_handle); - drop(self.update_matches(cx)); + drop(self.update_matches(true, cx)); if !self.dismissed { return ToolbarItemLocation::Secondary; } @@ -701,7 +701,8 @@ impl BufferSearchBar { cx: &mut ViewContext, ) -> oneshot::Receiver<()> { let options = options.unwrap_or(self.default_options); - if query != self.query(cx) || self.search_options != options { + let updated = query != self.query(cx) || self.search_options != options; + if updated { self.query_editor.update(cx, |query_editor, cx| { query_editor.buffer().update(cx, |query_buffer, cx| { let len = query_buffer.len(cx); @@ -712,7 +713,7 @@ impl BufferSearchBar { self.clear_matches(cx); cx.notify(); } - self.update_matches(cx) + self.update_matches(!updated, cx) } fn render_search_option_button( @@ -738,7 +739,7 @@ impl BufferSearchBar { ) { self.search_options.toggle(search_option); self.default_options = self.search_options; - drop(self.update_matches(cx)); + drop(self.update_matches(false, cx)); cx.notify(); } @@ -841,7 +842,7 @@ impl BufferSearchBar { editor::EditorEvent::Edited { .. 
} => { self.smartcase(cx); self.clear_matches(cx); - let search = self.update_matches(cx); + let search = self.update_matches(false, cx); let width = editor.update(cx, |editor, cx| { let text_layout_details = editor.text_layout_details(cx); @@ -879,7 +880,7 @@ impl BufferSearchBar { fn on_active_searchable_item_event(&mut self, event: &SearchEvent, cx: &mut ViewContext) { match event { SearchEvent::MatchesInvalidated => { - drop(self.update_matches(cx)); + drop(self.update_matches(false, cx)); } SearchEvent::ActiveMatchChanged => self.update_match_index(cx), } @@ -897,7 +898,7 @@ impl BufferSearchBar { if let Some(active_item) = self.active_searchable_item.as_mut() { self.selection_search_enabled = !self.selection_search_enabled; active_item.toggle_filtered_search_ranges(self.selection_search_enabled, cx); - drop(self.update_matches(cx)); + drop(self.update_matches(false, cx)); cx.notify(); } } @@ -937,7 +938,11 @@ impl BufferSearchBar { .extend(active_item_matches); } - fn update_matches(&mut self, cx: &mut ViewContext) -> oneshot::Receiver<()> { + fn update_matches( + &mut self, + reuse_existing_query: bool, + cx: &mut ViewContext, + ) -> oneshot::Receiver<()> { let (done_tx, done_rx) = oneshot::channel(); let query = self.query(cx); self.pending_search.take(); @@ -949,44 +954,51 @@ impl BufferSearchBar { let _ = done_tx.send(()); cx.notify(); } else { - let query: Arc<_> = if self.search_options.contains(SearchOptions::REGEX) { - match SearchQuery::regex( - query, - self.search_options.contains(SearchOptions::WHOLE_WORD), - self.search_options.contains(SearchOptions::CASE_SENSITIVE), - false, - Default::default(), - Default::default(), - None, - ) { - Ok(query) => query.with_replacement(self.replacement(cx)), - Err(_) => { - self.query_contains_error = true; - self.clear_active_searchable_item_matches(cx); - cx.notify(); - return done_rx; - } - } + let query: Arc<_> = if let Some(search) = + self.active_search.take().filter(|_| reuse_existing_query) + { + search 
} else { - match SearchQuery::text( - query, - self.search_options.contains(SearchOptions::WHOLE_WORD), - self.search_options.contains(SearchOptions::CASE_SENSITIVE), - false, - Default::default(), - Default::default(), - None, - ) { - Ok(query) => query.with_replacement(self.replacement(cx)), - Err(_) => { - self.query_contains_error = true; - self.clear_active_searchable_item_matches(cx); - cx.notify(); - return done_rx; + if self.search_options.contains(SearchOptions::REGEX) { + match SearchQuery::regex( + query, + self.search_options.contains(SearchOptions::WHOLE_WORD), + self.search_options.contains(SearchOptions::CASE_SENSITIVE), + false, + Default::default(), + Default::default(), + None, + ) { + Ok(query) => query.with_replacement(self.replacement(cx)), + Err(_) => { + self.query_contains_error = true; + self.clear_active_searchable_item_matches(cx); + cx.notify(); + return done_rx; + } + } + } else { + match SearchQuery::text( + query, + self.search_options.contains(SearchOptions::WHOLE_WORD), + self.search_options.contains(SearchOptions::CASE_SENSITIVE), + false, + Default::default(), + Default::default(), + None, + ) { + Ok(query) => query.with_replacement(self.replacement(cx)), + Err(_) => { + self.query_contains_error = true; + self.clear_active_searchable_item_matches(cx); + cx.notify(); + return done_rx; + } } } - } - .into(); + .into() + }; + self.active_search = Some(query.clone()); let query_text = query.as_str().to_string(); From 21be70f278acc2818c628bb0e8d33c8648655e34 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 24 Sep 2024 11:40:08 -0400 Subject: [PATCH 295/762] Improve diff hunks (#18283) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR improves the display of diff hunks: - Deleted hunks now show a regular line indicator in the gutter when expanded - The rounding on the diff indicators in the gutter has been removed. 
We also did some refactoring to ensure the sizing of the diff indicators in the gutter were consistent. #### Collapsed Screenshot 2024-09-24 at 11 13 26 AM #### Expanded Screenshot 2024-09-24 at 11 13 35 AM Release Notes: - Improved the appearance of diff hunks in the editor. --------- Co-authored-by: Max --- crates/editor/src/editor.rs | 5 -- crates/editor/src/element.rs | 120 +++++++++++++++++++++++++-------- crates/editor/src/git.rs | 4 +- crates/editor/src/hunk_diff.rs | 77 +++++++++------------ 4 files changed, 124 insertions(+), 82 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index dc536471023f08..a32910e78ab973 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12473,11 +12473,6 @@ impl Editor { Some(gpui::Point::new(source_x, source_y)) } - fn gutter_bounds(&self) -> Option> { - let bounds = self.last_bounds?; - Some(element::gutter_bounds(bounds, self.gutter_dimensions)) - } - pub fn has_active_completions_menu(&self) -> bool { self.context_menu.read().as_ref().map_or(false, |menu| { menu.visible() && matches!(menu, ContextMenu::Completions(_)) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index d4075431ff602b..3be71aeefba942 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1269,6 +1269,7 @@ impl EditorElement { line_height: Pixels, gutter_hitbox: &Hitbox, display_rows: Range, + anchor_range: Range, snapshot: &EditorSnapshot, cx: &mut WindowContext, ) -> Vec<(DisplayDiffHunk, Option)> { @@ -1289,30 +1290,84 @@ impl EditorElement { .git .git_gutter .unwrap_or_default(); - let display_hunks = buffer_snapshot - .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) - .map(|hunk| diff_hunk_to_display(&hunk, snapshot)) - .dedup() - .map(|hunk| match git_gutter_setting { - GitGutterSetting::TrackedFiles => { - let hitbox = match hunk { - DisplayDiffHunk::Unfolded { .. 
} => { - let hunk_bounds = Self::diff_hunk_bounds( - snapshot, - line_height, - gutter_hitbox.bounds, - &hunk, - ); - Some(cx.insert_hitbox(hunk_bounds, true)) + + self.editor.update(cx, |editor, cx| { + let expanded_hunks = &editor.expanded_hunks.hunks; + let expanded_hunks_start_ix = expanded_hunks + .binary_search_by(|hunk| { + hunk.hunk_range + .end + .cmp(&anchor_range.start, &buffer_snapshot) + .then(Ordering::Less) + }) + .unwrap_err(); + let mut expanded_hunks = expanded_hunks[expanded_hunks_start_ix..].iter().peekable(); + + let display_hunks = buffer_snapshot + .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) + .filter_map(|hunk| { + let mut display_hunk = diff_hunk_to_display(&hunk, snapshot); + + if let DisplayDiffHunk::Unfolded { + multi_buffer_range, + status, + .. + } = &mut display_hunk + { + let mut is_expanded = false; + while let Some(expanded_hunk) = expanded_hunks.peek() { + match expanded_hunk + .hunk_range + .start + .cmp(&multi_buffer_range.start, &buffer_snapshot) + { + Ordering::Less => { + expanded_hunks.next(); + } + Ordering::Equal => { + is_expanded = true; + break; + } + Ordering::Greater => { + break; + } + } } - DisplayDiffHunk::Folded { .. } => None, - }; - (hunk, hitbox) - } - GitGutterSetting::Hide => (hunk, None), - }) - .collect(); - display_hunks + match status { + DiffHunkStatus::Added => {} + DiffHunkStatus::Modified => {} + DiffHunkStatus::Removed => { + if is_expanded { + return None; + } + } + } + } + + Some(display_hunk) + }) + .dedup() + .map(|hunk| match git_gutter_setting { + GitGutterSetting::TrackedFiles => { + let hitbox = match hunk { + DisplayDiffHunk::Unfolded { .. } => { + let hunk_bounds = Self::diff_hunk_bounds( + snapshot, + line_height, + gutter_hitbox.bounds, + &hunk, + ); + Some(cx.insert_hitbox(hunk_bounds, true)) + } + DisplayDiffHunk::Folded { .. 
} => None, + }; + (hunk, hitbox) + } + GitGutterSetting::Hide => (hunk, None), + }) + .collect(); + display_hunks + }) } #[allow(clippy::too_many_arguments)] @@ -3187,7 +3242,7 @@ impl EditorElement { Some(( hunk_bounds, cx.theme().status().modified, - Corners::all(1. * line_height), + Corners::all(px(0.)), )) } DisplayDiffHunk::Unfolded { status, .. } => { @@ -3195,12 +3250,12 @@ impl EditorElement { DiffHunkStatus::Added => ( hunk_hitbox.bounds, cx.theme().status().created, - Corners::all(0.05 * line_height), + Corners::all(px(0.)), ), DiffHunkStatus::Modified => ( hunk_hitbox.bounds, cx.theme().status().modified, - Corners::all(0.05 * line_height), + Corners::all(px(0.)), ), DiffHunkStatus::Removed => ( Bounds::new( @@ -3244,7 +3299,7 @@ impl EditorElement { let start_y = display_row.as_f32() * line_height - scroll_top; let end_y = start_y + line_height; - let width = 0.275 * line_height; + let width = Self::diff_hunk_strip_width(line_height); let highlight_origin = gutter_bounds.origin + point(px(0.), start_y); let highlight_size = size(width, end_y - start_y); Bounds::new(highlight_origin, highlight_size) @@ -3277,7 +3332,7 @@ impl EditorElement { let start_y = start_row.as_f32() * line_height - scroll_top; let end_y = end_row_in_current_excerpt.as_f32() * line_height - scroll_top; - let width = 0.275 * line_height; + let width = Self::diff_hunk_strip_width(line_height); let highlight_origin = gutter_bounds.origin + point(px(0.), start_y); let highlight_size = size(width, end_y - start_y); Bounds::new(highlight_origin, highlight_size) @@ -3289,7 +3344,7 @@ impl EditorElement { let start_y = row.as_f32() * line_height - offset - scroll_top; let end_y = start_y + line_height; - let width = 0.35 * line_height; + let width = (0.35 * line_height).floor(); let highlight_origin = gutter_bounds.origin + point(px(0.), start_y); let highlight_size = size(width, end_y - start_y); Bounds::new(highlight_origin, highlight_size) @@ -3298,6 +3353,12 @@ impl EditorElement { } 
} + /// Returns the width of the diff strip that will be displayed in the gutter. + pub(super) fn diff_hunk_strip_width(line_height: Pixels) -> Pixels { + // We floor the value to prevent pixel rounding. + (0.275 * line_height).floor() + } + fn paint_gutter_indicators(&self, layout: &mut EditorLayout, cx: &mut WindowContext) { cx.paint_layer(layout.gutter_hitbox.bounds, |cx| { cx.with_element_namespace("gutter_fold_toggles", |cx| { @@ -5158,6 +5219,7 @@ impl Element for EditorElement { line_height, &gutter_hitbox, start_row..end_row, + start_anchor..end_anchor, &snapshot, cx, ); diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index 79b78d5d148488..fb18ca45a2a2ff 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -90,8 +90,8 @@ pub fn diff_hunk_to_display( let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start); let hunk_end_point = Point::new(hunk_end_row.0, 0); - let multi_buffer_start = snapshot.buffer_snapshot.anchor_after(hunk_start_point); - let multi_buffer_end = snapshot.buffer_snapshot.anchor_before(hunk_end_point); + let multi_buffer_start = snapshot.buffer_snapshot.anchor_before(hunk_start_point); + let multi_buffer_end = snapshot.buffer_snapshot.anchor_after(hunk_end_point); let end = hunk_end_point.to_display_point(snapshot).row(); DisplayDiffHunk::Unfolded { diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 917d07ec4ee85b..90836cee51683c 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -14,8 +14,8 @@ use multi_buffer::{ use settings::SettingsStore; use text::{BufferId, Point}; use ui::{ - div, h_flex, rems, v_flex, ActiveTheme, Context as _, ContextMenu, InteractiveElement, - IntoElement, ParentElement, Pixels, Styled, ViewContext, VisualContext, + prelude::*, ActiveTheme, ContextMenu, InteractiveElement, IntoElement, ParentElement, Pixels, + Styled, ViewContext, VisualContext, }; use util::{debug_panic, RangeExt}; @@ -38,7 +38,7 @@ 
pub(super) struct HoveredHunk { #[derive(Debug, Default)] pub(super) struct ExpandedHunks { - hunks: Vec, + pub(crate) hunks: Vec, diff_base: HashMap, hunk_update_tasks: HashMap, Task<()>>, } @@ -414,39 +414,22 @@ impl Editor { style: BlockStyle::Flex, disposition: BlockDisposition::Above, render: Box::new(move |cx| { - let Some(gutter_bounds) = editor.read(cx).gutter_bounds() else { - return div().into_any_element(); - }; - let (gutter_dimensions, hunk_bounds, close_button) = - editor.update(cx.context, |editor, cx| { - let editor_snapshot = editor.snapshot(cx); - let hunk_display_range = hunk - .multi_buffer_range - .clone() - .to_display_points(&editor_snapshot); - let gutter_dimensions = editor.gutter_dimensions; - let hunk_bounds = EditorElement::diff_hunk_bounds( - &editor_snapshot, - cx.line_height(), - gutter_bounds, - &DisplayDiffHunk::Unfolded { - diff_base_byte_range: hunk.diff_base_byte_range.clone(), - multi_buffer_range: hunk.multi_buffer_range.clone(), - display_row_range: hunk_display_range.start.row() - ..hunk_display_range.end.row(), - status: hunk.status, - }, - ); - - let close_button = editor.close_hunk_diff_button( - hunk.clone(), - hunk_display_range.start.row(), - cx, - ); - (gutter_dimensions, hunk_bounds, close_button) - }); - let click_editor = editor.clone(); - let clicked_hunk = hunk.clone(); + let width = EditorElement::diff_hunk_strip_width(cx.line_height()); + let gutter_dimensions = editor.read(cx.context).gutter_dimensions; + + let close_button = editor.update(cx.context, |editor, cx| { + let editor_snapshot = editor.snapshot(cx); + let hunk_display_range = hunk + .multi_buffer_range + .clone() + .to_display_points(&editor_snapshot); + editor.close_hunk_diff_button( + hunk.clone(), + hunk_display_range.start.row(), + cx, + ) + }); + h_flex() .id("gutter with editor") .bg(deleted_hunk_color) @@ -461,27 +444,29 @@ impl Editor { .child( h_flex() .id("gutter hunk") + .bg(cx.theme().status().deleted) .pl(gutter_dimensions.margin + 
gutter_dimensions .git_blame_entries_width .unwrap_or_default()) - .max_w(hunk_bounds.size.width) - .min_w(hunk_bounds.size.width) + .max_w(width) + .min_w(width) .size_full() .cursor(CursorStyle::PointingHand) .on_mouse_down(MouseButton::Left, { - let click_hunk = hunk.clone(); - move |e, cx| { - let modifiers = e.modifiers; + let editor = editor.clone(); + let hunk = hunk.clone(); + move |event, cx| { + let modifiers = event.modifiers; if modifiers.control || modifiers.platform { - click_editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&click_hunk, cx); + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); }); } else { - click_editor.update(cx, |editor, cx| { + editor.update(cx, |editor, cx| { editor.open_hunk_context_menu( - clicked_hunk.clone(), - e.position, + hunk.clone(), + event.position, cx, ); }); From 5e62bbfd29172c966c4a9e494d0063acdba639b9 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 09:44:53 -0600 Subject: [PATCH 296/762] Run system npm directly (#18280) Release Notes: - N/A --- crates/node_runtime/src/node_runtime.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 72c74ce7cf9830..9507eb75364859 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -538,11 +538,10 @@ impl NodeRuntimeTrait for SystemNodeRuntime { subcommand: &str, args: &[&str], ) -> anyhow::Result { - let mut command = Command::new(self.node.clone()); + let mut command = Command::new(self.npm.clone()); command .env_clear() .env("PATH", std::env::var_os("PATH").unwrap_or_default()) - .arg(self.npm.clone()) .arg(subcommand) .args(["--cache".into(), self.scratch_dir.join("cache")]) .args([ From 0e86ba0983bfa186030996c62e0163778665bfef Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 10:13:53 -0600 Subject: [PATCH 297/762] Fix get_cached_binary for eslint 
(#18286) Release Notes: - Fixed running ESLint offline. --- crates/language/src/language.rs | 3 +- crates/languages/src/typescript.rs | 46 +++++++++++++----------------- 2 files changed, 22 insertions(+), 27 deletions(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 166d846f86e8bc..d70650cf449356 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -384,8 +384,9 @@ pub trait LspAdapter: 'static + Send + Sync { .await { log::info!( - "failed to fetch newest version of language server {:?}. falling back to using {:?}", + "failed to fetch newest version of language server {:?}. error: {:?}, falling back to using {:?}", self.name(), + error, prev_downloaded_binary.path ); binary = Ok(prev_downloaded_binary); diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 25a97c8014d0a1..b7eb21132d52c4 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -297,7 +297,8 @@ pub struct EsLintLspAdapter { } impl EsLintLspAdapter { - const CURRENT_VERSION: &'static str = "release/2.4.4"; + const CURRENT_VERSION: &'static str = "2.4.4"; + const CURRENT_VERSION_TAG_NAME: &'static str = "release/2.4.4"; #[cfg(not(windows))] const GITHUB_ASSET_KIND: AssetKind = AssetKind::TarGz; @@ -313,6 +314,10 @@ impl EsLintLspAdapter { pub fn new(node: NodeRuntime) -> Self { EsLintLspAdapter { node } } + + fn build_destination_path(container_dir: &Path) -> PathBuf { + container_dir.join(format!("vscode-eslint-{}", Self::CURRENT_VERSION)) + } } #[async_trait(?Send)] @@ -413,7 +418,7 @@ impl LspAdapter for EsLintLspAdapter { ) -> Result> { let url = build_asset_url( "microsoft/vscode-eslint", - Self::CURRENT_VERSION, + Self::CURRENT_VERSION_TAG_NAME, Self::GITHUB_ASSET_KIND, )?; @@ -430,7 +435,7 @@ impl LspAdapter for EsLintLspAdapter { delegate: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); - let destination_path = 
container_dir.join(format!("vscode-eslint-{}", version.name)); + let destination_path = Self::build_destination_path(&container_dir); let server_path = destination_path.join(Self::SERVER_PATH); if fs::metadata(&server_path).await.is_err() { @@ -496,38 +501,27 @@ impl LspAdapter for EsLintLspAdapter { container_dir: PathBuf, _: &dyn LspAdapterDelegate, ) -> Option { - get_cached_eslint_server_binary(container_dir, &self.node).await + let server_path = + Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH); + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, + env: None, + arguments: eslint_server_binary_arguments(&server_path), + }) } async fn installation_test_binary( &self, container_dir: PathBuf, ) -> Option { - get_cached_eslint_server_binary(container_dir, &self.node).await - } -} - -async fn get_cached_eslint_server_binary( - container_dir: PathBuf, - node: &NodeRuntime, -) -> Option { - maybe!(async { - // This is unfortunate but we don't know what the version is to build a path directly - let mut dir = fs::read_dir(&container_dir).await?; - let first = dir.next().await.ok_or(anyhow!("missing first file"))??; - if !first.file_type().await?.is_dir() { - return Err(anyhow!("First entry is not a directory")); - } - let server_path = first.path().join(EsLintLspAdapter::SERVER_PATH); - - Ok(LanguageServerBinary { - path: node.binary_path().await?, + let server_path = + Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH); + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, env: None, arguments: eslint_server_binary_arguments(&server_path), }) - }) - .await - .log_err() + } } #[cfg(target_os = "windows")] From b69c6ee7dfdcffd94024491776c68189a9e9d6ea Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 10:17:43 -0600 Subject: [PATCH 298/762] Exclude initialization failed errors from slack (#18232) Release Notes: - N/A --- 
crates/collab/src/api/events.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index f8ae53201304fb..1632c2d798ef15 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -369,6 +369,10 @@ fn report_to_slack(panic: &Panic) -> bool { return false; } + if panic.payload.contains("ERROR_INITIALIZATION_FAILED") { + return false; + } + if panic .payload .contains("GPU has crashed, and no debug information is available") From 4a4d8c1cabcf8f898cdba496e08b2b27a4386922 Mon Sep 17 00:00:00 2001 From: "Sergio C." Date: Tue, 24 Sep 2024 13:21:57 -0300 Subject: [PATCH 299/762] vim: Add ability to spawn multicursors at beginning/end of line (#18183) Closes #17842 Release Notes: - Added the ability to spawn multiple cursors through the g-A and g-I motions while in visual select mode. --- assets/keymaps/vim.json | 2 + crates/vim/src/visual.rs | 85 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 86 insertions(+), 1 deletion(-) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 8d933f19afb1dd..6656ea0ddf22c3 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -292,6 +292,8 @@ "g ctrl-x": ["vim::Decrement", { "step": true }], "shift-i": "vim::InsertBefore", "shift-a": "vim::InsertAfter", + "g I": "vim::VisualInsertFirstNonWhiteSpace", + "g A": "vim::VisualInsertEndOfLine", "shift-j": "vim::JoinLines", "r": ["vim::PushOperator", "Replace"], "ctrl-c": ["vim::SwitchMode", "Normal"], diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 55dc7885200b04..1503eaac1b6b43 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -15,7 +15,7 @@ use util::ResultExt; use workspace::searchable::Direction; use crate::{ - motion::{start_of_line, Motion}, + motion::{first_non_whitespace, next_line_end, start_of_line, Motion}, object::Object, state::{Mode, Operator}, Vim, @@ -37,6 +37,8 @@ actions!( SelectNextMatch, 
SelectPreviousMatch, RestoreVisualSelection, + VisualInsertEndOfLine, + VisualInsertFirstNonWhiteSpace, ] ); @@ -51,6 +53,8 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.toggle_mode(Mode::VisualBlock, cx) }); Vim::action(editor, cx, Vim::other_end); + Vim::action(editor, cx, Vim::visual_insert_end_of_line); + Vim::action(editor, cx, Vim::visual_insert_first_non_white_space); Vim::action(editor, cx, |vim, _: &VisualDelete, cx| { vim.record_current_action(cx); vim.visual_delete(false, cx); @@ -374,6 +378,39 @@ impl Vim { } } + fn visual_insert_end_of_line(&mut self, _: &VisualInsertEndOfLine, cx: &mut ViewContext) { + self.update_editor(cx, |_, editor, cx| { + editor.split_selection_into_lines(&Default::default(), cx); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, cursor, _| { + (next_line_end(map, cursor, 1), SelectionGoal::None) + }); + }); + }); + + self.switch_mode(Mode::Insert, false, cx); + } + + fn visual_insert_first_non_white_space( + &mut self, + _: &VisualInsertFirstNonWhiteSpace, + cx: &mut ViewContext, + ) { + self.update_editor(cx, |_, editor, cx| { + editor.split_selection_into_lines(&Default::default(), cx); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_cursors_with(|map, cursor, _| { + ( + first_non_whitespace(map, false, cursor), + SelectionGoal::None, + ) + }); + }); + }); + + self.switch_mode(Mode::Insert, false, cx); + } + fn toggle_mode(&mut self, mode: Mode, cx: &mut ViewContext) { if self.mode == mode { self.switch_mode(Mode::Normal, false, cx); @@ -714,6 +751,52 @@ mod test { ˇ"}); } + #[gpui::test] + async fn test_visual_insert_first_non_whitespace(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! { + "«The quick brown + fox jumps over + the lazy dogˇ»" + }, + Mode::Visual, + ); + cx.simulate_keystrokes("g I"); + cx.assert_state( + indoc! 
{ + "ˇThe quick brown + ˇfox jumps over + ˇthe lazy dog" + }, + Mode::Insert, + ); + } + + #[gpui::test] + async fn test_visual_insert_end_of_line(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! { + "«The quick brown + fox jumps over + the lazy dogˇ»" + }, + Mode::Visual, + ); + cx.simulate_keystrokes("g A"); + cx.assert_state( + indoc! { + "The quick brownˇ + fox jumps overˇ + the lazy dogˇ" + }, + Mode::Insert, + ); + } + #[gpui::test] async fn test_enter_visual_line_mode(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; From fd07fef4dbc475381706c338e9600bd099af162d Mon Sep 17 00:00:00 2001 From: ClanEver <562211524@qq.com> Date: Wed, 25 Sep 2024 00:27:29 +0800 Subject: [PATCH 300/762] Fix proxy settings retrieval on startup (#18171) Closes https://github.com/zed-industries/zed/issues/18155 Release Notes: - N/A --- crates/zed/src/main.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 309931f6163528..e3fe2baefa9b6c 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -443,6 +443,8 @@ fn main() { AppCommitSha::set_global(AppCommitSha(build_sha.into()), cx); } settings::init(cx); + handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed); + handle_keymap_file_changes(user_keymap_file_rx, cx, handle_keymap_changed); client::init_settings(cx); let user_agent = format!( "Zed/{} ({}; {})", @@ -470,9 +472,6 @@ fn main() { OpenListener::set_global(cx, open_listener.clone()); - handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed); - handle_keymap_file_changes(user_keymap_file_rx, cx, handle_keymap_changed); - let client = Client::production(cx); cx.set_http_client(client.http_client().clone()); let mut languages = LanguageRegistry::new(cx.background_executor().clone()); From 8a7ef4db59668f9dda841a9550abd1cc78651fa3 Mon Sep 17 
00:00:00 2001 From: John Cummings Date: Tue, 24 Sep 2024 11:17:17 -0600 Subject: [PATCH 301/762] ollama: Add max tokens for qwen2.5-coder (#18290) --- crates/ollama/src/ollama.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index 51c48290488147..e592bfa17717d3 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -82,7 +82,8 @@ fn get_max_tokens(name: &str) -> usize { "llama3" | "gemma2" | "gemma" | "codegemma" | "starcoder" | "aya" => 8192, "codellama" | "starcoder2" => 16384, "mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "dolphin-mixtral" => 32768, - "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder" => 128000, + "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder" + | "qwen2.5-coder" => 128000, _ => DEFAULT_TOKENS, } .clamp(1, MAXIMUM_TOKENS) From f39e54decc613314b1302cfbb1d581a056a6eb68 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 12:23:39 -0600 Subject: [PATCH 302/762] Start work on reload buffers (#18245) Release Notes: - Fixed: ssh-remote reload buffers --------- Co-authored-by: Mikayla --- crates/project/src/buffer_store.rs | 309 +++++++++++++----- crates/project/src/project.rs | 139 +------- crates/remote_server/src/headless_project.rs | 3 +- .../remote_server/src/remote_editing_tests.rs | 82 ++++- 4 files changed, 325 insertions(+), 208 deletions(-) diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 0045aba2e89ecf..b69679d6ac3b3a 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -14,7 +14,10 @@ use gpui::{ }; use http_client::Url; use language::{ - proto::{deserialize_line_ending, deserialize_version, serialize_version, split_operations}, + proto::{ + deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version, + split_operations, + }, Buffer, BufferEvent, 
Capability, File as _, Language, Operation, }; use rpc::{proto, AnyProtoClient, ErrorExt as _, TypedEnvelope}; @@ -29,9 +32,8 @@ use worktree::{ /// A set of open buffers. pub struct BufferStore { - downstream_client: Option, - remote_id: Option, - #[allow(unused)] + state: BufferStoreState, + downstream_client: Option<(AnyProtoClient, u64)>, worktree_store: Model, opened_buffers: HashMap, local_buffer_ids_by_path: HashMap, @@ -44,12 +46,11 @@ pub struct BufferStore { loading_remote_buffers_by_id: HashMap>, remote_buffer_listeners: HashMap, anyhow::Error>>>>, - shared_buffers: HashMap>, + shared_buffers: HashMap>>, } enum OpenBuffer { - Strong(Model), - Weak(WeakModel), + Buffer(WeakModel), Operations(Vec), } @@ -62,6 +63,15 @@ pub enum BufferStoreEvent { }, } +enum BufferStoreState { + Remote { + shared_with_me: HashSet>, + upstream_client: AnyProtoClient, + project_id: u64, + }, + Local {}, +} + #[derive(Default, Debug)] pub struct ProjectTransaction(pub HashMap, language::Transaction>); @@ -75,17 +85,36 @@ impl BufferStore { client.add_model_message_handler(Self::handle_update_diff_base); client.add_model_request_handler(Self::handle_save_buffer); client.add_model_request_handler(Self::handle_blame_buffer); + client.add_model_request_handler(Self::handle_reload_buffers); } /// Creates a buffer store, optionally retaining its buffers. - /// - /// If `retain_buffers` is `true`, then buffers are owned by the buffer store - /// and won't be released unless they are explicitly removed, or `retain_buffers` - /// is set to `false` via `set_retain_buffers`. Otherwise, buffers are stored as - /// weak handles. 
- pub fn new( + pub fn local(worktree_store: Model, cx: &mut ModelContext) -> Self { + cx.subscribe(&worktree_store, |this, _, event, cx| { + if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { + this.subscribe_to_worktree(worktree, cx); + } + }) + .detach(); + + Self { + state: BufferStoreState::Local {}, + downstream_client: None, + worktree_store, + opened_buffers: Default::default(), + remote_buffer_listeners: Default::default(), + loading_remote_buffers_by_id: Default::default(), + local_buffer_ids_by_path: Default::default(), + local_buffer_ids_by_entry_id: Default::default(), + loading_buffers_by_path: Default::default(), + shared_buffers: Default::default(), + } + } + + pub fn remote( worktree_store: Model, - remote_id: Option, + upstream_client: AnyProtoClient, + remote_id: u64, cx: &mut ModelContext, ) -> Self { cx.subscribe(&worktree_store, |this, _, event, cx| { @@ -96,7 +125,11 @@ impl BufferStore { .detach(); Self { - remote_id, + state: BufferStoreState::Remote { + shared_with_me: Default::default(), + upstream_client, + project_id: remote_id, + }, downstream_client: None, worktree_store, opened_buffers: Default::default(), @@ -288,16 +321,14 @@ impl BufferStore { buffer.set_diff_base(diff_base.clone(), cx); buffer.remote_id().to_proto() }); - if let Some(project_id) = this.remote_id { - if let Some(client) = &this.downstream_client { - client - .send(proto::UpdateDiffBase { - project_id, - buffer_id, - diff_base, - }) - .log_err(); - } + if let Some((client, project_id)) = &this.downstream_client { + client + .send(proto::UpdateDiffBase { + project_id: *project_id, + buffer_id, + diff_base, + }) + .log_err(); } } }) @@ -496,8 +527,8 @@ impl BufferStore { let new_file = save.await?; let mtime = new_file.mtime; this.update(&mut cx, |this, cx| { - if let Some(downstream_client) = this.downstream_client.as_ref() { - let project_id = this.remote_id.unwrap_or(0); + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + 
let project_id = *project_id; if has_changed_file { downstream_client .send(proto::UpdateBufferFile { @@ -620,11 +651,7 @@ impl BufferStore { fn add_buffer(&mut self, buffer: Model, cx: &mut ModelContext) -> Result<()> { let remote_id = buffer.read(cx).remote_id(); let is_remote = buffer.read(cx).replica_id() != 0; - let open_buffer = if self.remote_id.is_some() { - OpenBuffer::Strong(buffer.clone()) - } else { - OpenBuffer::Weak(buffer.downgrade()) - }; + let open_buffer = OpenBuffer::Buffer(buffer.downgrade()); let handle = cx.handle().downgrade(); buffer.update(cx, move |_, cx| { @@ -768,8 +795,7 @@ impl BufferStore { } pub fn disconnected_from_host(&mut self, cx: &mut AppContext) { - self.downstream_client.take(); - self.set_remote_id(None, cx); + self.drop_unnecessary_buffers(cx); for buffer in self.buffers() { buffer.update(cx, |buffer, cx| { @@ -786,32 +812,20 @@ impl BufferStore { &mut self, remote_id: u64, downstream_client: AnyProtoClient, - cx: &mut AppContext, + _cx: &mut AppContext, ) { - self.downstream_client = Some(downstream_client); - self.set_remote_id(Some(remote_id), cx); + self.downstream_client = Some((downstream_client, remote_id)); } pub fn unshared(&mut self, _cx: &mut ModelContext) { - self.remote_id.take(); + self.downstream_client.take(); + self.forget_shared_buffers(); } - fn set_remote_id(&mut self, remote_id: Option, cx: &mut AppContext) { - self.remote_id = remote_id; + fn drop_unnecessary_buffers(&mut self, cx: &mut AppContext) { for open_buffer in self.opened_buffers.values_mut() { - if remote_id.is_some() { - if let OpenBuffer::Weak(buffer) = open_buffer { - if let Some(buffer) = buffer.upgrade() { - *open_buffer = OpenBuffer::Strong(buffer); - } - } - } else { - if let Some(buffer) = open_buffer.upgrade() { - buffer.update(cx, |buffer, _| buffer.give_up_waiting()); - } - if let OpenBuffer::Strong(buffer) = open_buffer { - *open_buffer = OpenBuffer::Weak(buffer.downgrade()); - } + if let Some(buffer) = open_buffer.upgrade() { + 
buffer.update(cx, |buffer, _| buffer.give_up_waiting()); } } } @@ -881,8 +895,26 @@ impl BufferStore { event: &BufferEvent, cx: &mut ModelContext, ) { - if event == &BufferEvent::FileHandleChanged { - self.buffer_changed_file(buffer, cx); + match event { + BufferEvent::FileHandleChanged => { + self.buffer_changed_file(buffer, cx); + } + BufferEvent::Reloaded => { + let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else { + return; + }; + let buffer = buffer.read(cx); + downstream_client + .send(proto::BufferReloaded { + project_id: *project_id, + buffer_id: buffer.remote_id().to_proto(), + version: serialize_version(&buffer.version()), + mtime: buffer.saved_mtime().map(|t| t.into()), + line_ending: serialize_line_ending(buffer.line_ending()) as i32, + }) + .log_err(); + } + _ => {} } } @@ -986,16 +1018,14 @@ impl BufferStore { } } - if let Some(project_id) = self.remote_id { - if let Some(client) = &self.downstream_client { - client - .send(proto::UpdateBufferFile { - project_id, - buffer_id: buffer_id.to_proto(), - file: Some(new_file.to_proto(cx)), - }) - .ok(); - } + if let Some((client, project_id)) = &self.downstream_client { + client + .send(proto::UpdateBufferFile { + project_id: *project_id, + buffer_id: buffer_id.to_proto(), + file: Some(new_file.to_proto(cx)), + }) + .ok(); } buffer.file_updated(Arc::new(new_file), cx); @@ -1050,11 +1080,8 @@ impl BufferStore { this.update(&mut cx, |this, cx| { match this.opened_buffers.entry(buffer_id) { hash_map::Entry::Occupied(mut e) => match e.get_mut() { - OpenBuffer::Strong(buffer) => { - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); - } OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops), - OpenBuffer::Weak(buffer) => { + OpenBuffer::Buffer(buffer) => { if let Some(buffer) = buffer.upgrade() { buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } @@ -1090,7 +1117,7 @@ impl BufferStore { self.shared_buffers .entry(guest_id) .or_default() - 
.insert(buffer_id); + .insert(buffer.clone()); let buffer = buffer.read(cx); response.buffers.push(proto::BufferVersion { @@ -1230,6 +1257,19 @@ impl BufferStore { } } else if chunk.is_last { self.loading_remote_buffers_by_id.remove(&buffer_id); + // retain buffers sent by peers to avoid races. + match &mut self.state { + BufferStoreState::Remote { + ref mut shared_with_me, + upstream_client, + .. + } => { + if upstream_client.is_via_collab() { + shared_with_me.insert(buffer.clone()); + } + } + _ => {} + } self.add_buffer(buffer, cx)?; } } @@ -1303,7 +1343,10 @@ impl BufferStore { let (buffer, project_id) = this.update(&mut cx, |this, _| { anyhow::Ok(( this.get_existing(buffer_id)?, - this.remote_id.context("project is not shared")?, + this.downstream_client + .as_ref() + .map(|(_, project_id)| *project_id) + .context("project is not shared")?, )) })??; buffer @@ -1340,12 +1383,14 @@ impl BufferStore { let peer_id = envelope.sender_id; let buffer_id = BufferId::new(envelope.payload.buffer_id)?; this.update(&mut cx, |this, _| { - if let Some(shared) = this.shared_buffers.get_mut(&peer_id) { - if shared.remove(&buffer_id) { - if shared.is_empty() { - this.shared_buffers.remove(&peer_id); + if let Some(buffer) = this.get(buffer_id) { + if let Some(shared) = this.shared_buffers.get_mut(&peer_id) { + if shared.remove(&buffer) { + if shared.is_empty() { + this.shared_buffers.remove(&peer_id); + } + return; } - return; } }; debug_panic!( @@ -1429,6 +1474,98 @@ impl BufferStore { } } + pub fn reload_buffers( + &self, + buffers: HashSet>, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + let mut local_buffers = Vec::new(); + let mut remote_buffers = Vec::new(); + for buffer_handle in buffers { + let buffer = buffer_handle.read(cx); + if buffer.is_dirty() { + if let Some(file) = File::from_dyn(buffer.file()) { + if file.is_local() { + local_buffers.push(buffer_handle); + } else { + remote_buffers.push(buffer_handle); + } + } + } + } + + let client = 
self.upstream_client(); + + cx.spawn(move |this, mut cx| async move { + let mut project_transaction = ProjectTransaction::default(); + if let Some((client, project_id)) = client { + let response = client + .request(proto::ReloadBuffers { + project_id, + buffer_ids: remote_buffers + .iter() + .filter_map(|buffer| { + buffer + .update(&mut cx, |buffer, _| buffer.remote_id().into()) + .ok() + }) + .collect(), + }) + .await? + .transaction + .ok_or_else(|| anyhow!("missing transaction"))?; + BufferStore::deserialize_project_transaction( + this, + response, + push_to_history, + cx.clone(), + ) + .await?; + } + + for buffer in local_buffers { + let transaction = buffer + .update(&mut cx, |buffer, cx| buffer.reload(cx))? + .await?; + buffer.update(&mut cx, |buffer, cx| { + if let Some(transaction) = transaction { + if !push_to_history { + buffer.forget_transaction(transaction.id); + } + project_transaction.0.insert(cx.handle(), transaction); + } + })?; + } + + Ok(project_transaction) + }) + } + + async fn handle_reload_buffers( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let sender_id = envelope.original_sender_id().unwrap_or_default(); + let reload = this.update(&mut cx, |this, cx| { + let mut buffers = HashSet::default(); + for buffer_id in &envelope.payload.buffer_ids { + let buffer_id = BufferId::new(*buffer_id)?; + buffers.insert(this.get_existing(buffer_id)?); + } + Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx)) + })??; + + let project_transaction = reload.await?; + let project_transaction = this.update(&mut cx, |this, cx| { + this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx) + })?; + Ok(proto::ReloadBuffersResponse { + transaction: Some(project_transaction), + }) + } + pub fn create_buffer_for_peer( &mut self, buffer: &Model, @@ -1440,12 +1577,12 @@ impl BufferStore { .shared_buffers .entry(peer_id) .or_default() - .insert(buffer_id) + .insert(buffer.clone()) { return 
Task::ready(Ok(())); } - let Some((client, project_id)) = self.downstream_client.clone().zip(self.remote_id) else { + let Some((client, project_id)) = self.downstream_client.clone() else { return Task::ready(Ok(())); }; @@ -1492,6 +1629,17 @@ impl BufferStore { }) } + pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { + match &self.state { + BufferStoreState::Remote { + upstream_client, + project_id, + .. + } => Some((upstream_client.clone(), *project_id)), + BufferStoreState::Local { .. } => None, + } + } + pub fn forget_shared_buffers(&mut self) { self.shared_buffers.clear(); } @@ -1506,7 +1654,7 @@ impl BufferStore { } } - pub fn shared_buffers(&self) -> &HashMap> { + pub fn shared_buffers(&self) -> &HashMap>> { &self.shared_buffers } @@ -1572,8 +1720,7 @@ impl BufferStore { impl OpenBuffer { fn upgrade(&self) -> Option> { match self { - OpenBuffer::Strong(handle) => Some(handle.clone()), - OpenBuffer::Weak(handle) => handle.upgrade(), + OpenBuffer::Buffer(handle) => handle.upgrade(), OpenBuffer::Operations(_) => None, } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 199b5a8f5c5755..454a7586c8856c 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -42,10 +42,7 @@ use gpui::{ use itertools::Itertools; use language::{ language_settings::InlayHintKind, - proto::{ - deserialize_anchor, serialize_anchor, serialize_line_ending, serialize_version, - split_operations, - }, + proto::{deserialize_anchor, serialize_anchor, split_operations}, Buffer, BufferEvent, CachedLspAdapter, Capability, CodeLabel, ContextProvider, DiagnosticEntry, Documentation, File as _, Language, LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped, @@ -559,7 +556,6 @@ impl Project { client.add_model_message_handler(Self::handle_unshare_project); client.add_model_request_handler(Self::handle_update_buffer); client.add_model_message_handler(Self::handle_update_worktree); - 
client.add_model_request_handler(Self::handle_reload_buffers); client.add_model_request_handler(Self::handle_synchronize_buffers); client.add_model_request_handler(Self::handle_search_project); @@ -599,8 +595,7 @@ impl Project { cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); - let buffer_store = - cx.new_model(|cx| BufferStore::new(worktree_store.clone(), None, cx)); + let buffer_store = cx.new_model(|cx| BufferStore::local(worktree_store.clone(), cx)); cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); @@ -695,8 +690,14 @@ impl Project { cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); - let buffer_store = - cx.new_model(|cx| BufferStore::new(worktree_store.clone(), None, cx)); + let buffer_store = cx.new_model(|cx| { + BufferStore::remote( + worktree_store.clone(), + ssh.clone().into(), + SSH_PROJECT_ID, + cx, + ) + }); cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); @@ -851,8 +852,9 @@ impl Project { .map(DevServerProjectId), ) })?; - let buffer_store = - cx.new_model(|cx| BufferStore::new(worktree_store.clone(), Some(remote_id), cx))?; + let buffer_store = cx.new_model(|cx| { + BufferStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx) + })?; let lsp_store = cx.new_model(|cx| { let mut lsp_store = LspStore::new_remote( @@ -2167,23 +2169,6 @@ impl Project { .ok(); } - BufferEvent::Reloaded => { - if self.is_local_or_ssh() { - if let Some(project_id) = self.remote_id() { - let buffer = buffer.read(cx); - self.client - .send(proto::BufferReloaded { - project_id, - buffer_id: buffer.remote_id().to_proto(), - version: serialize_version(&buffer.version()), - mtime: buffer.saved_mtime().map(|t| t.into()), - line_ending: serialize_line_ending(buffer.line_ending()) as i32, - }) - .log_err(); - } - } - } - _ => {} } @@ -2347,67 +2332,8 @@ impl Project { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - let mut local_buffers = Vec::new(); - let mut 
remote_buffers = None; - for buffer_handle in buffers { - let buffer = buffer_handle.read(cx); - if buffer.is_dirty() { - if let Some(file) = File::from_dyn(buffer.file()) { - if file.is_local() { - local_buffers.push(buffer_handle); - } else { - remote_buffers.get_or_insert(Vec::new()).push(buffer_handle); - } - } - } - } - - let remote_buffers = self.remote_id().zip(remote_buffers); - let client = self.client.clone(); - - cx.spawn(move |this, mut cx| async move { - let mut project_transaction = ProjectTransaction::default(); - - if let Some((project_id, remote_buffers)) = remote_buffers { - let response = client - .request(proto::ReloadBuffers { - project_id, - buffer_ids: remote_buffers - .iter() - .filter_map(|buffer| { - buffer - .update(&mut cx, |buffer, _| buffer.remote_id().into()) - .ok() - }) - .collect(), - }) - .await? - .transaction - .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, - response, - push_to_history, - cx.clone(), - ) - .await?; - } - - for buffer in local_buffers { - let transaction = buffer - .update(&mut cx, |buffer, cx| buffer.reload(cx))? - .await?; - buffer.update(&mut cx, |buffer, cx| { - if let Some(transaction) = transaction { - if !push_to_history { - buffer.forget_transaction(transaction.id); - } - project_transaction.0.insert(cx.handle(), transaction); - } - })?; - } - - Ok(project_transaction) + self.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.reload_buffers(buffers, push_to_history, cx) }) } @@ -3589,30 +3515,6 @@ impl Project { })? 
} - async fn handle_reload_buffers( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let sender_id = envelope.original_sender_id()?; - let reload = this.update(&mut cx, |this, cx| { - let mut buffers = HashSet::default(); - for buffer_id in &envelope.payload.buffer_ids { - let buffer_id = BufferId::new(*buffer_id)?; - buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?); - } - Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx)) - })??; - - let project_transaction = reload.await?; - let project_transaction = this.update(&mut cx, |this, cx| { - this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx) - })?; - Ok(proto::ReloadBuffersResponse { - transaction: Some(project_transaction), - }) - } - async fn handle_synchronize_buffers( this: Model, envelope: TypedEnvelope, @@ -3896,17 +3798,6 @@ impl Project { })? } - fn serialize_project_transaction_for_peer( - &mut self, - project_transaction: ProjectTransaction, - peer_id: proto::PeerId, - cx: &mut AppContext, - ) -> proto::ProjectTransaction { - self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.serialize_project_transaction_for_peer(project_transaction, peer_id, cx) - }) - } - fn create_buffer_for_peer( &mut self, buffer: &Model, diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 043f7e95ee026c..0af0d6bb1570dd 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -50,8 +50,7 @@ impl HeadlessProject { store }); let buffer_store = cx.new_model(|cx| { - let mut buffer_store = - BufferStore::new(worktree_store.clone(), Some(SSH_PROJECT_ID), cx); + let mut buffer_store = BufferStore::local(worktree_store.clone(), cx); buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx); buffer_store }); diff --git a/crates/remote_server/src/remote_editing_tests.rs 
b/crates/remote_server/src/remote_editing_tests.rs index 18eb12b445b97b..eca65f1349845c 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -7,6 +7,7 @@ use http_client::FakeHttpClient; use language::{ language_settings::{all_language_settings, AllLanguageSettings}, Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LanguageServerName, + LineEnding, }; use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind}; use node_runtime::NodeRuntime; @@ -18,7 +19,10 @@ use remote::SshSession; use serde_json::json; use settings::{Settings, SettingsLocation, SettingsStore}; use smol::stream::StreamExt; -use std::{path::Path, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; #[gpui::test] async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { @@ -440,6 +444,54 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext }) } +#[gpui::test] +async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let (project, _headless, fs) = init_test(cx, server_cx).await; + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + + let worktree_id = cx.update(|cx| worktree.read(cx).id()); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, Path::new("src/lib.rs")), cx) + }) + .await + .unwrap(); + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, "a")], None, cx); + }); + + fs.save( + &PathBuf::from("/code/project1/src/lib.rs"), + &("bloop".to_string().into()), + LineEnding::Unix, + ) + .await + .unwrap(); + + cx.run_until_parked(); + cx.update(|cx| { + assert!(buffer.read(cx).has_conflict()); + }); + + project + .update(cx, |project, cx| { + project.reload_buffers([buffer.clone()].into_iter().collect(), false, cx) + }) + .await + .unwrap(); + 
cx.run_until_parked(); + + cx.update(|cx| { + assert!(!buffer.read(cx).has_conflict()); + }); +} + #[gpui::test] async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let (project, _headless, _fs) = init_test(cx, server_cx).await; @@ -483,6 +535,34 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut ); } +#[gpui::test(iterations = 10)] +async fn test_canceling_buffer_opening(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let (project, _headless, _fs) = init_test(cx, server_cx).await; + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + let worktree_id = worktree.read_with(cx, |tree, _| tree.id()); + + // Open a buffer on the client but cancel after a random amount of time. + let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, "src/lib.rs"), cx)); + cx.executor().simulate_random_delay().await; + drop(buffer); + + // Try opening the same buffer again as the client, and ensure we can + // still do it despite the cancellation above. + let buffer = project + .update(cx, |p, cx| p.open_buffer((worktree_id, "src/lib.rs"), cx)) + .await + .unwrap(); + + buffer.read_with(cx, |buf, _| { + assert_eq!(buf.text(), "fn one() -> usize { 1 }") + }); +} + fn init_logger() { if std::env::var("RUST_LOG").is_ok() { env_logger::try_init().ok(); From 96068584362a43ef4dd20055f5c8b3c9710066fd Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 24 Sep 2024 12:08:22 -0700 Subject: [PATCH 303/762] Fix unnecessarily-specific struct pattern in rust outline query (#18297) Fixes https://github.com/zed-industries/zed/issues/18294 Release Notes: - Fixed a recent regression where tuple and unit structs were omitted from the outline view in Rust (#18294). 
--- crates/languages/src/rust/outline.scm | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/languages/src/rust/outline.scm b/crates/languages/src/rust/outline.scm index 98892ce93cfb35..3012995e2a7f23 100644 --- a/crates/languages/src/rust/outline.scm +++ b/crates/languages/src/rust/outline.scm @@ -4,8 +4,7 @@ (struct_item (visibility_modifier)? @context "struct" @context - name: (_) @name - body: (_ "{" @open (_)* "}" @close)) @item + name: (_) @name) @item (enum_item (visibility_modifier)? @context From 87ac4cff60c05bac9d118df25b13987df6f1a5bd Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 24 Sep 2024 15:42:26 -0400 Subject: [PATCH 304/762] Update close_stale_issues configuration (#18298) @notpeter and I decided on these things: - Give users 10 days to respond - Only run once a week: Tuesday @7AM ET Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 1f287fb5e3d4b9..2d4085524bb8d0 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -1,7 +1,7 @@ name: "Close Stale Issues" on: schedule: - - cron: "0 1 * * *" + - cron: "0 11 * * 2" workflow_dispatch: jobs: @@ -14,7 +14,7 @@ jobs: stale-issue-message: > Hi there! 👋 - We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in a week. Feel free to open a new issue if you're seeing this message after the issue has been closed. + We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? 
If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in 10 days. Feel free to open a new issue if you're seeing this message after the issue has been closed. Thanks for your help! close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" @@ -23,7 +23,7 @@ jobs: # 'community' to 'zed' repository. The migration added activity to all # issues, preventing 365 days from working until then. days-before-stale: 180 - days-before-close: 7 + days-before-close: 10 any-of-issue-labels: "defect,panic / crash" operations-per-run: 1000 ascending: true From 692590bff435da3bdb7a0bd1bf9f139c3bdc6eb1 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 24 Sep 2024 15:44:55 -0400 Subject: [PATCH 305/762] collab: Fix GitHub user retrieval in seed script (#18296) This PR fixes the GitHub user retrieval in the database seed script. The users returned from the [list users](https://docs.github.com/en/rest/users/users?apiVersion=2022-11-28#list-users) endpoint don't have a `created_at` timestamp, so we need to fetch them individually. I want to rework this further at a later date, this is just a bandaid to get things working again. Release Notes: - N/A --- crates/collab/src/db/queries/users.rs | 6 ++++ crates/collab/src/seed.rs | 43 +++++++++++++++++++++++---- 2 files changed, 44 insertions(+), 5 deletions(-) diff --git a/crates/collab/src/db/queries/users.rs b/crates/collab/src/db/queries/users.rs index b755476e338b60..4443d751542b50 100644 --- a/crates/collab/src/db/queries/users.rs +++ b/crates/collab/src/db/queries/users.rs @@ -298,6 +298,12 @@ impl Database { result } + /// Returns all feature flags. + pub async fn list_feature_flags(&self) -> Result> { + self.transaction(|tx| async move { Ok(feature_flag::Entity::find().all(&*tx).await?) }) + .await + } + /// Creates a new feature flag. 
pub async fn create_user_flag(&self, flag: &str, enabled_for_all: bool) -> Result { self.transaction(|tx| async move { diff --git a/crates/collab/src/seed.rs b/crates/collab/src/seed.rs index 15aa9d159183f8..035d58109b596d 100644 --- a/crates/collab/src/seed.rs +++ b/crates/collab/src/seed.rs @@ -16,13 +16,23 @@ struct GithubUser { created_at: DateTime, } +/// A GitHub user returned from the [List users](https://docs.github.com/en/rest/users/users?apiVersion=2022-11-28#list-users) endpoint. +/// +/// Notably, this data type does not have the `created_at` field. +#[derive(Debug, Deserialize)] +struct ListGithubUser { + id: i32, + login: String, + email: Option, +} + #[derive(Deserialize)] struct SeedConfig { - // Which users to create as admins. + /// Which users to create as admins. admins: Vec, - // Which channels to create (all admins are invited to all channels) + /// Which channels to create (all admins are invited to all channels). channels: Vec, - // Number of random users to create from the Github API + /// Number of random users to create from the Github API. 
number_of_users: Option, } @@ -47,11 +57,21 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result let flag_names = ["remoting", "language-models"]; let mut flags = Vec::new(); + let existing_feature_flags = db.list_feature_flags().await?; + for flag_name in flag_names { + if existing_feature_flags + .iter() + .any(|flag| flag.flag == flag_name) + { + log::info!("Flag {flag_name:?} already exists"); + continue; + } + let flag = db .create_user_flag(flag_name, false) .await - .unwrap_or_else(|_| panic!("failed to create flag: '{flag_name}'")); + .unwrap_or_else(|err| panic!("failed to create flag: '{flag_name}': {err}")); flags.push(flag); } @@ -121,9 +141,19 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result if let Some(last_user_id) = last_user_id { write!(&mut uri, "&since={}", last_user_id).unwrap(); } - let users = fetch_github::>(&client, &uri).await; + let users = fetch_github::>(&client, &uri).await; for github_user in users { + log::info!("Seeding {:?} from GitHub", github_user.login); + + // Fetch the user to get their `created_at` timestamp, since it + // isn't on the list response. + let github_user: GithubUser = fetch_github( + &client, + &format!("https://api.github.com/user/{}", github_user.id), + ) + .await; + last_user_id = Some(github_user.id); user_count += 1; let user = db @@ -143,6 +173,9 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result flag, user.id ))?; } + + // Sleep to avoid getting rate-limited by GitHub. + tokio::time::sleep(std::time::Duration::from_millis(250)).await; } } } From d2ffad0f34322d9a332860e566f63aec01a83fe7 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 24 Sep 2024 16:35:09 -0400 Subject: [PATCH 306/762] collab: Seed GitHub users from static data (#18301) This PR updates the collab seed script to seed the GitHub users from a set of static data. This removes the need to hit the GitHub API to retrieve these users. 
Release Notes: - N/A --- crates/collab/README.md | 3 +- crates/collab/seed.default.json | 3 +- crates/collab/seed/github_users.json | 602 +++++++++++++++++++++++++++ crates/collab/src/seed.rs | 89 ++-- 4 files changed, 630 insertions(+), 67 deletions(-) create mode 100644 crates/collab/seed/github_users.json diff --git a/crates/collab/README.md b/crates/collab/README.md index 345e82aefed78f..5aa964ee792fee 100644 --- a/crates/collab/README.md +++ b/crates/collab/README.md @@ -23,8 +23,7 @@ To use a different set of admin users, create `crates/collab/seed.json`. ```json { "admins": ["yourgithubhere"], - "channels": ["zed"], - "number_of_users": 20 + "channels": ["zed"] } ``` diff --git a/crates/collab/seed.default.json b/crates/collab/seed.default.json index 1abec644beed9f..dee924e103d620 100644 --- a/crates/collab/seed.default.json +++ b/crates/collab/seed.default.json @@ -8,6 +8,5 @@ "JosephTLyons", "rgbkrk" ], - "channels": ["zed"], - "number_of_users": 100 + "channels": ["zed"] } diff --git a/crates/collab/seed/github_users.json b/crates/collab/seed/github_users.json new file mode 100644 index 00000000000000..88acd6aa54a709 --- /dev/null +++ b/crates/collab/seed/github_users.json @@ -0,0 +1,602 @@ +[ + { + "id": 1, + "login": "mojombo", + "email": "tom@mojombo.com", + "created_at": "2007-10-20T05:24:19Z" + }, + { + "id": 2, + "login": "defunkt", + "email": null, + "created_at": "2007-10-20T05:24:19Z" + }, + { + "id": 3, + "login": "pjhyett", + "email": "pj@hyett.com", + "created_at": "2008-01-07T17:54:22Z" + }, + { + "id": 4, + "login": "wycats", + "email": "wycats@gmail.com", + "created_at": "2008-01-12T05:38:33Z" + }, + { + "id": 5, + "login": "ezmobius", + "email": null, + "created_at": "2008-01-12T07:51:46Z" + }, + { + "id": 6, + "login": "ivey", + "email": "ivey@gweezlebur.com", + "created_at": "2008-01-12T15:15:00Z" + }, + { + "id": 7, + "login": "evanphx", + "email": "evan@phx.io", + "created_at": "2008-01-12T16:46:24Z" + }, + { + "id": 17, + "login": 
"vanpelt", + "email": "vanpelt@wandb.com", + "created_at": "2008-01-13T05:57:18Z" + }, + { + "id": 18, + "login": "wayneeseguin", + "email": "wayneeseguin@gmail.com", + "created_at": "2008-01-13T06:02:21Z" + }, + { + "id": 19, + "login": "brynary", + "email": null, + "created_at": "2008-01-13T10:19:47Z" + }, + { + "id": 20, + "login": "kevinclark", + "email": "kevin.clark@gmail.com", + "created_at": "2008-01-13T18:33:26Z" + }, + { + "id": 21, + "login": "technoweenie", + "email": "technoweenie@hey.com", + "created_at": "2008-01-14T04:33:35Z" + }, + { + "id": 22, + "login": "macournoyer", + "email": "macournoyer@gmail.com", + "created_at": "2008-01-14T10:49:35Z" + }, + { + "id": 23, + "login": "takeo", + "email": "toby@takeo.email", + "created_at": "2008-01-14T11:25:49Z" + }, + { + "id": 25, + "login": "caged", + "email": "encytemedia@gmail.com", + "created_at": "2008-01-15T04:47:24Z" + }, + { + "id": 26, + "login": "topfunky", + "email": null, + "created_at": "2008-01-15T05:40:05Z" + }, + { + "id": 27, + "login": "anotherjesse", + "email": "anotherjesse@gmail.com", + "created_at": "2008-01-15T07:49:30Z" + }, + { + "id": 28, + "login": "roland", + "email": null, + "created_at": "2008-01-15T08:12:51Z" + }, + { + "id": 29, + "login": "lukas", + "email": "lukas@wandb.com", + "created_at": "2008-01-15T12:50:02Z" + }, + { + "id": 30, + "login": "fanvsfan", + "email": null, + "created_at": "2008-01-15T14:15:23Z" + }, + { + "id": 31, + "login": "tomtt", + "email": null, + "created_at": "2008-01-15T15:44:31Z" + }, + { + "id": 32, + "login": "railsjitsu", + "email": null, + "created_at": "2008-01-16T04:57:23Z" + }, + { + "id": 34, + "login": "nitay", + "email": null, + "created_at": "2008-01-18T14:09:11Z" + }, + { + "id": 35, + "login": "kevwil", + "email": null, + "created_at": "2008-01-19T05:50:12Z" + }, + { + "id": 36, + "login": "KirinDave", + "email": null, + "created_at": "2008-01-19T08:01:02Z" + }, + { + "id": 37, + "login": "jamesgolick", + "email": 
"jamesgolick@gmail.com", + "created_at": "2008-01-19T22:52:30Z" + }, + { + "id": 38, + "login": "atmos", + "email": "atmos@atmos.org", + "created_at": "2008-01-22T09:14:11Z" + }, + { + "id": 44, + "login": "errfree", + "email": null, + "created_at": "2008-01-24T02:08:37Z" + }, + { + "id": 45, + "login": "mojodna", + "email": null, + "created_at": "2008-01-24T04:40:22Z" + }, + { + "id": 46, + "login": "bmizerany", + "email": "blake.mizerany@gmail.com", + "created_at": "2008-01-24T04:44:30Z" + }, + { + "id": 47, + "login": "jnewland", + "email": "jesse@jnewland.com", + "created_at": "2008-01-25T02:28:12Z" + }, + { + "id": 48, + "login": "joshknowles", + "email": "joshknowles@gmail.com", + "created_at": "2008-01-25T21:30:42Z" + }, + { + "id": 49, + "login": "hornbeck", + "email": "hornbeck@gmail.com", + "created_at": "2008-01-25T21:49:23Z" + }, + { + "id": 50, + "login": "jwhitmire", + "email": "jeff@jwhitmire.com", + "created_at": "2008-01-25T22:07:48Z" + }, + { + "id": 51, + "login": "elbowdonkey", + "email": null, + "created_at": "2008-01-25T22:08:20Z" + }, + { + "id": 52, + "login": "reinh", + "email": null, + "created_at": "2008-01-25T22:16:29Z" + }, + { + "id": 53, + "login": "knzai", + "email": "git@knz.ai", + "created_at": "2008-01-25T22:33:10Z" + }, + { + "id": 68, + "login": "bs", + "email": "yap@bri.tt", + "created_at": "2008-01-27T01:46:29Z" + }, + { + "id": 69, + "login": "rsanheim", + "email": null, + "created_at": "2008-01-27T07:09:47Z" + }, + { + "id": 70, + "login": "schacon", + "email": "schacon@gmail.com", + "created_at": "2008-01-27T17:19:28Z" + }, + { + "id": 71, + "login": "uggedal", + "email": null, + "created_at": "2008-01-27T22:18:57Z" + }, + { + "id": 72, + "login": "bruce", + "email": "brwcodes@gmail.com", + "created_at": "2008-01-28T07:16:45Z" + }, + { + "id": 73, + "login": "sam", + "email": "ssmoot@gmail.com", + "created_at": "2008-01-28T19:01:26Z" + }, + { + "id": 74, + "login": "mmower", + "email": "self@mattmower.com", + "created_at": 
"2008-01-28T19:47:50Z" + }, + { + "id": 75, + "login": "abhay", + "email": null, + "created_at": "2008-01-28T21:08:23Z" + }, + { + "id": 76, + "login": "rabble", + "email": "evan@protest.net", + "created_at": "2008-01-28T23:27:02Z" + }, + { + "id": 77, + "login": "benburkert", + "email": "ben@benburkert.com", + "created_at": "2008-01-28T23:44:14Z" + }, + { + "id": 78, + "login": "indirect", + "email": "andre@arko.net", + "created_at": "2008-01-29T07:59:27Z" + }, + { + "id": 79, + "login": "fearoffish", + "email": "me@fearof.fish", + "created_at": "2008-01-29T08:43:10Z" + }, + { + "id": 80, + "login": "ry", + "email": "ry@tinyclouds.org", + "created_at": "2008-01-29T08:50:34Z" + }, + { + "id": 81, + "login": "engineyard", + "email": null, + "created_at": "2008-01-29T09:51:30Z" + }, + { + "id": 82, + "login": "jsierles", + "email": null, + "created_at": "2008-01-29T11:10:25Z" + }, + { + "id": 83, + "login": "tweibley", + "email": null, + "created_at": "2008-01-29T13:52:07Z" + }, + { + "id": 84, + "login": "peimei", + "email": "james@railsjitsu.com", + "created_at": "2008-01-29T15:44:11Z" + }, + { + "id": 85, + "login": "brixen", + "email": "brixen@gmail.com", + "created_at": "2008-01-29T16:47:55Z" + }, + { + "id": 87, + "login": "tmornini", + "email": null, + "created_at": "2008-01-29T18:43:39Z" + }, + { + "id": 88, + "login": "outerim", + "email": "lee@outerim.com", + "created_at": "2008-01-29T18:48:32Z" + }, + { + "id": 89, + "login": "daksis", + "email": null, + "created_at": "2008-01-29T19:18:16Z" + }, + { + "id": 90, + "login": "sr", + "email": "me@simonrozet.com", + "created_at": "2008-01-29T20:37:53Z" + }, + { + "id": 91, + "login": "lifo", + "email": null, + "created_at": "2008-01-29T23:09:30Z" + }, + { + "id": 92, + "login": "rsl", + "email": "sconds@gmail.com", + "created_at": "2008-01-29T23:13:36Z" + }, + { + "id": 93, + "login": "imownbey", + "email": null, + "created_at": "2008-01-29T23:13:44Z" + }, + { + "id": 94, + "login": "dylanegan", + "email": 
null, + "created_at": "2008-01-29T23:15:18Z" + }, + { + "id": 95, + "login": "jm", + "email": "jeremymcanally@gmail.com", + "created_at": "2008-01-29T23:15:32Z" + }, + { + "id": 100, + "login": "kmarsh", + "email": "kevin.marsh@gmail.com", + "created_at": "2008-01-29T23:48:24Z" + }, + { + "id": 101, + "login": "jvantuyl", + "email": "jayson@aggressive.ly", + "created_at": "2008-01-30T01:11:50Z" + }, + { + "id": 102, + "login": "BrianTheCoder", + "email": "wbsmith83@gmail.com", + "created_at": "2008-01-30T02:22:32Z" + }, + { + "id": 103, + "login": "freeformz", + "email": "freeformz@gmail.com", + "created_at": "2008-01-30T06:19:57Z" + }, + { + "id": 104, + "login": "hassox", + "email": "dneighman@gmail.com", + "created_at": "2008-01-30T06:31:06Z" + }, + { + "id": 105, + "login": "automatthew", + "email": "automatthew@gmail.com", + "created_at": "2008-01-30T19:00:58Z" + }, + { + "id": 106, + "login": "queso", + "email": "Joshua.owens@gmail.com", + "created_at": "2008-01-30T19:48:45Z" + }, + { + "id": 107, + "login": "lancecarlson", + "email": null, + "created_at": "2008-01-30T19:53:29Z" + }, + { + "id": 108, + "login": "drnic", + "email": "drnicwilliams@gmail.com", + "created_at": "2008-01-30T23:19:18Z" + }, + { + "id": 109, + "login": "lukesutton", + "email": null, + "created_at": "2008-01-31T04:01:02Z" + }, + { + "id": 110, + "login": "danwrong", + "email": null, + "created_at": "2008-01-31T08:51:31Z" + }, + { + "id": 111, + "login": "HamptonMakes", + "email": "hampton@hamptoncatlin.com", + "created_at": "2008-01-31T17:03:51Z" + }, + { + "id": 112, + "login": "jfrost", + "email": null, + "created_at": "2008-01-31T22:14:27Z" + }, + { + "id": 113, + "login": "mattetti", + "email": null, + "created_at": "2008-01-31T22:56:31Z" + }, + { + "id": 114, + "login": "ctennis", + "email": "c@leb.tennis", + "created_at": "2008-01-31T23:43:14Z" + }, + { + "id": 115, + "login": "lawrencepit", + "email": "lawrence.pit@gmail.com", + "created_at": "2008-01-31T23:57:16Z" + }, + { + 
"id": 116, + "login": "marcjeanson", + "email": "github@marcjeanson.com", + "created_at": "2008-02-01T01:27:19Z" + }, + { + "id": 117, + "login": "grempe", + "email": null, + "created_at": "2008-02-01T04:12:42Z" + }, + { + "id": 118, + "login": "peterc", + "email": "git@peterc.org", + "created_at": "2008-02-02T01:00:36Z" + }, + { + "id": 119, + "login": "ministrycentered", + "email": null, + "created_at": "2008-02-02T03:50:26Z" + }, + { + "id": 120, + "login": "afarnham", + "email": null, + "created_at": "2008-02-02T05:11:03Z" + }, + { + "id": 121, + "login": "up_the_irons", + "email": null, + "created_at": "2008-02-02T10:59:51Z" + }, + { + "id": 122, + "login": "cristibalan", + "email": "cristibalan@gmail.com", + "created_at": "2008-02-02T11:29:45Z" + }, + { + "id": 123, + "login": "heavysixer", + "email": null, + "created_at": "2008-02-02T15:06:53Z" + }, + { + "id": 124, + "login": "brosner", + "email": "brosner@gmail.com", + "created_at": "2008-02-02T19:03:54Z" + }, + { + "id": 125, + "login": "danielmorrison", + "email": "daniel@collectiveidea.com", + "created_at": "2008-02-02T19:46:35Z" + }, + { + "id": 126, + "login": "danielharan", + "email": "chebuctonian@gmail.com", + "created_at": "2008-02-02T21:42:21Z" + }, + { + "id": 127, + "login": "kvnsmth", + "email": null, + "created_at": "2008-02-02T22:00:03Z" + }, + { + "id": 128, + "login": "collectiveidea", + "email": "info@collectiveidea.com", + "created_at": "2008-02-02T22:34:46Z" + }, + { + "id": 129, + "login": "canadaduane", + "email": "duane.johnson@gmail.com", + "created_at": "2008-02-02T23:25:39Z" + }, + { + "id": 130, + "login": "corasaurus-hex", + "email": "cora@sutton.me", + "created_at": "2008-02-03T04:20:22Z" + }, + { + "id": 131, + "login": "dstrelau", + "email": null, + "created_at": "2008-02-03T14:59:12Z" + }, + { + "id": 132, + "login": "sunny", + "email": "sunny@sunfox.org", + "created_at": "2008-02-03T15:43:43Z" + }, + { + "id": 133, + "login": "dkubb", + "email": "github@dan.kubb.ca", + 
"created_at": "2008-02-03T20:40:13Z" + }, + { + "id": 134, + "login": "jnicklas", + "email": "jonas@jnicklas.com", + "created_at": "2008-02-03T20:43:50Z" + }, + { + "id": 135, + "login": "richcollins", + "email": "richcollins@gmail.com", + "created_at": "2008-02-03T21:11:25Z" + } +] diff --git a/crates/collab/src/seed.rs b/crates/collab/src/seed.rs index 035d58109b596d..5de6515ae3ac89 100644 --- a/crates/collab/src/seed.rs +++ b/crates/collab/src/seed.rs @@ -4,10 +4,13 @@ use anyhow::Context; use chrono::{DateTime, Utc}; use db::Database; use serde::{de::DeserializeOwned, Deserialize}; -use std::{fmt::Write, fs, path::Path}; +use std::{fs, path::Path}; use crate::Config; +/// A GitHub user. +/// +/// This representation corresponds to the entries in the `seed/github_users.json` file. #[derive(Debug, Deserialize)] struct GithubUser { id: i32, @@ -16,24 +19,12 @@ struct GithubUser { created_at: DateTime, } -/// A GitHub user returned from the [List users](https://docs.github.com/en/rest/users/users?apiVersion=2022-11-28#list-users) endpoint. -/// -/// Notably, this data type does not have the `created_at` field. -#[derive(Debug, Deserialize)] -struct ListGithubUser { - id: i32, - login: String, - email: Option, -} - #[derive(Deserialize)] struct SeedConfig { /// Which users to create as admins. admins: Vec, /// Which channels to create (all admins are invited to all channels). channels: Vec, - /// Number of random users to create from the Github API. - number_of_users: Option, } pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result<()> { @@ -126,57 +117,29 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result } } - // TODO: Fix this later - if let Some(number_of_users) = seed_config.number_of_users { - // Fetch 100 other random users from GitHub and insert them into the database - // (for testing autocompleters, etc.) 
- let mut user_count = db - .get_all_users(0, 200) + let github_users_filepath = seed_path.parent().unwrap().join("seed/github_users.json"); + let github_users: Vec = + serde_json::from_str(&fs::read_to_string(github_users_filepath)?)?; + + for github_user in github_users { + log::info!("Seeding {:?} from GitHub", github_user.login); + + let user = db + .get_or_create_user_by_github_account( + &github_user.login, + github_user.id, + github_user.email.as_deref(), + github_user.created_at, + None, + ) .await - .expect("failed to load users from db") - .len(); - let mut last_user_id = None; - while user_count < number_of_users { - let mut uri = "https://api.github.com/users?per_page=100".to_string(); - if let Some(last_user_id) = last_user_id { - write!(&mut uri, "&since={}", last_user_id).unwrap(); - } - let users = fetch_github::>(&client, &uri).await; - - for github_user in users { - log::info!("Seeding {:?} from GitHub", github_user.login); - - // Fetch the user to get their `created_at` timestamp, since it - // isn't on the list response. - let github_user: GithubUser = fetch_github( - &client, - &format!("https://api.github.com/user/{}", github_user.id), - ) - .await; - - last_user_id = Some(github_user.id); - user_count += 1; - let user = db - .get_or_create_user_by_github_account( - &github_user.login, - github_user.id, - github_user.email.as_deref(), - github_user.created_at, - None, - ) - .await - .expect("failed to insert user"); - - for flag in &flags { - db.add_user_flag(user.id, *flag).await.context(format!( - "Unable to enable flag '{}' for user '{}'", - flag, user.id - ))?; - } - - // Sleep to avoid getting rate-limited by GitHub. 
- tokio::time::sleep(std::time::Duration::from_millis(250)).await; - } + .expect("failed to insert user"); + + for flag in &flags { + db.add_user_flag(user.id, *flag).await.context(format!( + "Unable to enable flag '{}' for user '{}'", + flag, user.id + ))?; } } From 2d71c36ad3ec7b4b000c6144089c7a2294d0a19c Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 25 Sep 2024 00:29:56 +0300 Subject: [PATCH 307/762] Allow clearning activity indicators on click (#18305) All indicators without the click action are now could be hidden with a click. Sometimes, I see a few language server updates statuses get stuck due to npm desperately attempting to access its registry (3 times per each package, with the timeout a bit under 1 minute per each request). So, while the message seems stuck, npm desperately tries to do some work in the background. https://docs.npmjs.com/cli/v10/using-npm/config has options for timeouts & retries for __package fetching__ but that does not include the actual __registry access attempts__. It's unclear how to proceed with npm on this case now, but at least we should allow hiding these redundant messages. 
Release Notes: - Improved activity indicators' UX by allowing more of them to be hidden on click --- .../src/activity_indicator.rs | 72 +++++++++++++++---- 1 file changed, 59 insertions(+), 13 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index fee0ef73f7bee1..52e6acc393d299 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -227,10 +227,10 @@ impl ActivityIndicator { for status in &self.statuses { match status.status { LanguageServerBinaryStatus::CheckingForUpdate => { - checking_for_update.push(status.name.0.as_ref()) + checking_for_update.push(status.name.clone()) } - LanguageServerBinaryStatus::Downloading => downloading.push(status.name.0.as_ref()), - LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.0.as_ref()), + LanguageServerBinaryStatus::Downloading => downloading.push(status.name.clone()), + LanguageServerBinaryStatus::Failed { .. 
} => failed.push(status.name.clone()), LanguageServerBinaryStatus::None => {} } } @@ -242,8 +242,24 @@ impl ActivityIndicator { .size(IconSize::Small) .into_any_element(), ), - message: format!("Downloading {}...", downloading.join(", "),), - on_click: None, + message: format!( + "Downloading {}...", + downloading.iter().map(|name| name.0.as_ref()).fold( + String::new(), + |mut acc, s| { + if !acc.is_empty() { + acc.push_str(", "); + } + acc.push_str(s); + acc + } + ) + ), + on_click: Some(Arc::new(move |this, cx| { + this.statuses + .retain(|status| !downloading.contains(&status.name)); + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }); } @@ -256,9 +272,22 @@ impl ActivityIndicator { ), message: format!( "Checking for updates to {}...", - checking_for_update.join(", "), + checking_for_update.iter().map(|name| name.0.as_ref()).fold( + String::new(), + |mut acc, s| { + if !acc.is_empty() { + acc.push_str(", "); + } + acc.push_str(s); + acc + } + ), ), - on_click: None, + on_click: Some(Arc::new(move |this, cx| { + this.statuses + .retain(|status| !checking_for_update.contains(&status.name)); + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }); } @@ -271,7 +300,16 @@ impl ActivityIndicator { ), message: format!( "Failed to download {}. 
Click to show error.", - failed.join(", "), + failed + .iter() + .map(|name| name.0.as_ref()) + .fold(String::new(), |mut acc, s| { + if !acc.is_empty() { + acc.push_str(", "); + } + acc.push_str(s); + acc + }), ), on_click: Some(Arc::new(|this, cx| { this.show_error_message(&Default::default(), cx) @@ -304,7 +342,9 @@ impl ActivityIndicator { .into_any_element(), ), message: "Checking for Zed updates…".to_string(), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }), AutoUpdateStatus::Downloading => Some(Content { icon: Some( @@ -313,7 +353,9 @@ impl ActivityIndicator { .into_any_element(), ), message: "Downloading Zed update…".to_string(), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }), AutoUpdateStatus::Installing => Some(Content { icon: Some( @@ -322,7 +364,9 @@ impl ActivityIndicator { .into_any_element(), ), message: "Installing Zed update…".to_string(), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }), AutoUpdateStatus::Updated { binary_path } => Some(Content { icon: None, @@ -342,7 +386,7 @@ impl ActivityIndicator { ), message: "Auto update failed".to_string(), on_click: Some(Arc::new(|this, cx| { - this.dismiss_error_message(&Default::default(), cx) + this.dismiss_error_message(&DismissErrorMessage, cx) })), }), AutoUpdateStatus::Idle => None, @@ -360,7 +404,9 @@ impl ActivityIndicator { .into_any_element(), ), message: format!("Updating {extension_id} extension…"), - on_click: None, + on_click: Some(Arc::new(|this, cx| { + this.dismiss_error_message(&DismissErrorMessage, cx) + })), }); } } From 5045f984a90dd26ba0c0e2fc8ce6cbab70ba5b75 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 15:37:09 -0600 Subject: [PATCH 308/762] Tidy up some broken menu items (#18306) Release Notes: - ssh-remoting: Don't show "reveal in finder" in 
menu --- crates/outline_panel/src/outline_panel.rs | 6 ++++-- crates/project_panel/src/project_panel.rs | 9 ++++++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 73570dd5afc775..da66ca40313d89 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -3906,9 +3906,11 @@ impl Render for OutlinePanel { .on_action(cx.listener(Self::toggle_active_editor_pin)) .on_action(cx.listener(Self::unfold_directory)) .on_action(cx.listener(Self::fold_directory)) - .when(project.is_local_or_ssh(), |el| { + .when(project.is_local(), |el| { el.on_action(cx.listener(Self::reveal_in_finder)) - .on_action(cx.listener(Self::open_in_terminal)) + }) + .when(project.is_local_or_ssh(), |el| { + el.on_action(cx.listener(Self::open_in_terminal)) }) .on_mouse_down( MouseButton::Right, diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index cd4196dbc67b15..8e741134f0e446 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -484,6 +484,7 @@ impl ProjectPanel { let worktree_id = worktree.id(); let is_read_only = project.is_read_only(); let is_remote = project.is_via_collab() && project.dev_server_project_id().is_none(); + let is_local = project.is_local(); let context_menu = ContextMenu::build(cx, |menu, cx| { menu.context(self.focus_handle.clone()).map(|menu| { @@ -495,13 +496,15 @@ impl ProjectPanel { menu.action("New File", Box::new(NewFile)) .action("New Folder", Box::new(NewDirectory)) .separator() - .when(cfg!(target_os = "macos"), |menu| { + .when(is_local && cfg!(target_os = "macos"), |menu| { menu.action("Reveal in Finder", Box::new(RevealInFileManager)) }) - .when(cfg!(not(target_os = "macos")), |menu| { + .when(is_local && cfg!(not(target_os = "macos")), |menu| { menu.action("Reveal in File Manager", 
Box::new(RevealInFileManager)) }) - .action("Open in Default App", Box::new(OpenWithSystem)) + .when(is_local, |menu| { + menu.action("Open in Default App", Box::new(OpenWithSystem)) + }) .action("Open in Terminal", Box::new(OpenInTerminal)) .when(is_dir, |menu| { menu.separator() From da1ef13442e095414e23db86623a5b5acd117cd3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 24 Sep 2024 14:39:44 -0700 Subject: [PATCH 309/762] Fix detection that a diff hunk is expanded (#18302) Release Notes: - N/A --------- Co-authored-by: Marshall --- crates/collab/src/tests/editor_tests.rs | 5 +---- crates/editor/src/element.rs | 6 +++++- crates/editor/src/hunk_diff.rs | 8 ++++++-- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 7fb1a49f870d97..121c93656305a6 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -2214,10 +2214,7 @@ struct Row10;"#}; let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(5)] - ); + assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); assert_eq!( all_hunks, vec![( diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 3be71aeefba942..31e4efb83b60e7 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1335,7 +1335,11 @@ impl EditorElement { } match status { DiffHunkStatus::Added => {} - DiffHunkStatus::Modified => {} + DiffHunkStatus::Modified => { + if is_expanded { + *status = DiffHunkStatus::Added; + } + } DiffHunkStatus::Removed => { if is_expanded { return None; diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 90836cee51683c..2f7bb49e853491 100644 --- a/crates/editor/src/hunk_diff.rs +++ 
b/crates/editor/src/hunk_diff.rs @@ -279,8 +279,12 @@ impl Editor { ..Point::new(remaining_hunk.row_range.end.0, 0); hunks_to_expand.push(HoveredHunk { status: hunk_status(&remaining_hunk), - multi_buffer_range: remaining_hunk_point_range - .to_anchors(&snapshot.buffer_snapshot), + multi_buffer_range: snapshot + .buffer_snapshot + .anchor_before(remaining_hunk_point_range.start) + ..snapshot + .buffer_snapshot + .anchor_after(remaining_hunk_point_range.end), diff_base_byte_range: remaining_hunk.diff_base_byte_range.clone(), }); } From c4e0f5e0ee83e02567a5512b0f5fafef49225e66 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 15:52:30 -0600 Subject: [PATCH 310/762] Rebuild buffer store to be aware of remote/local distinction (#18303) Release Notes: - N/A --------- Co-authored-by: Mikayla --- .../remote_editing_collaboration_tests.rs | 27 +- crates/project/src/buffer_store.rs | 1981 ++++++++++------- crates/project/src/lsp_command.rs | 23 +- crates/project/src/lsp_store.rs | 33 +- crates/project/src/project.rs | 18 +- .../remote_server/src/remote_editing_tests.rs | 1 + 6 files changed, 1192 insertions(+), 891 deletions(-) diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index a81166bb00ceec..bad5ef9053ce70 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -3,6 +3,7 @@ use call::ActiveCall; use fs::{FakeFs, Fs as _}; use gpui::{Context as _, TestAppContext}; use language::language_settings::all_language_settings; +use project::ProjectPath; use remote::SshSession; use remote_server::HeadlessProject; use serde_json::json; @@ -108,14 +109,36 @@ async fn test_sharing_an_ssh_remote_project( }); project_b - .update(cx_b, |project, cx| project.save_buffer(buffer_b, cx)) + .update(cx_b, |project, cx| { + project.save_buffer_as( + buffer_b.clone(), + ProjectPath { + worktree_id: 
worktree_id.to_owned(), + path: Arc::from(Path::new("src/renamed.rs")), + }, + cx, + ) + }) .await .unwrap(); assert_eq!( remote_fs - .load("/code/project1/src/lib.rs".as_ref()) + .load("/code/project1/src/renamed.rs".as_ref()) .await .unwrap(), "fn one() -> usize { 100 }" ); + cx_b.run_until_parked(); + cx_b.update(|cx| { + assert_eq!( + buffer_b + .read(cx) + .file() + .unwrap() + .path() + .to_string_lossy() + .to_string(), + "src/renamed.rs".to_string() + ); + }); } diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index b69679d6ac3b3a..aa86a8f7e256e8 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -10,7 +10,8 @@ use fs::Fs; use futures::{channel::oneshot, stream::FuturesUnordered, StreamExt}; use git::blame::Blame; use gpui::{ - AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Task, WeakModel, + AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Subscription, + Task, WeakModel, }; use http_client::Url; use language::{ @@ -25,27 +26,72 @@ use smol::channel::Receiver; use std::{io, path::Path, str::FromStr as _, sync::Arc, time::Instant}; use text::BufferId; use util::{debug_panic, maybe, ResultExt as _, TryFutureExt}; -use worktree::{ - File, PathChange, ProjectEntryId, RemoteWorktree, UpdatedGitRepositoriesSet, Worktree, - WorktreeId, -}; +use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId}; -/// A set of open buffers. 
-pub struct BufferStore { - state: BufferStoreState, - downstream_client: Option<(AnyProtoClient, u64)>, +trait BufferStoreImpl { + fn open_buffer( + &self, + path: Arc, + worktree: Model, + cx: &mut ModelContext, + ) -> Task>>; + + fn save_buffer( + &self, + buffer: Model, + cx: &mut ModelContext, + ) -> Task>; + + fn save_buffer_as( + &self, + buffer: Model, + path: ProjectPath, + cx: &mut ModelContext, + ) -> Task>; + + fn create_buffer(&self, cx: &mut ModelContext) -> Task>>; + + fn reload_buffers( + &self, + buffers: Vec>, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task>; + + fn as_remote(&self) -> Option>; + fn as_local(&self) -> Option>; +} + +struct RemoteBufferStore { + shared_with_me: HashSet>, + upstream_client: AnyProtoClient, + project_id: u64, + loading_remote_buffers_by_id: HashMap>, + remote_buffer_listeners: + HashMap, anyhow::Error>>>>, worktree_store: Model, - opened_buffers: HashMap, + buffer_store: WeakModel, +} + +struct LocalBufferStore { local_buffer_ids_by_path: HashMap, local_buffer_ids_by_entry_id: HashMap, + buffer_store: WeakModel, + worktree_store: Model, + _subscription: Subscription, +} + +/// A set of open buffers. 
+pub struct BufferStore { + state: Box, #[allow(clippy::type_complexity)] loading_buffers_by_path: HashMap< ProjectPath, postage::watch::Receiver, Arc>>>, >, - loading_remote_buffers_by_id: HashMap>, - remote_buffer_listeners: - HashMap, anyhow::Error>>>>, + worktree_store: Model, + opened_buffers: HashMap, + downstream_client: Option<(AnyProtoClient, u64)>, shared_buffers: HashMap>>, } @@ -63,19 +109,858 @@ pub enum BufferStoreEvent { }, } -enum BufferStoreState { - Remote { - shared_with_me: HashSet>, - upstream_client: AnyProtoClient, - project_id: u64, - }, - Local {}, +#[derive(Default, Debug)] +pub struct ProjectTransaction(pub HashMap, language::Transaction>); + +impl EventEmitter for BufferStore {} + +impl RemoteBufferStore { + pub fn wait_for_remote_buffer( + &mut self, + id: BufferId, + cx: &mut AppContext, + ) -> Task>> { + let buffer_store = self.buffer_store.clone(); + let (tx, rx) = oneshot::channel(); + self.remote_buffer_listeners.entry(id).or_default().push(tx); + + cx.spawn(|cx| async move { + if let Some(buffer) = buffer_store + .read_with(&cx, |buffer_store, _| buffer_store.get(id)) + .ok() + .flatten() + { + return Ok(buffer); + } + + cx.background_executor() + .spawn(async move { rx.await? 
}) + .await + }) + } + + fn save_remote_buffer( + &self, + buffer_handle: Model, + new_path: Option, + cx: &ModelContext, + ) -> Task> { + let buffer = buffer_handle.read(cx); + let buffer_id = buffer.remote_id().into(); + let version = buffer.version(); + let rpc = self.upstream_client.clone(); + let project_id = self.project_id; + cx.spawn(move |_, mut cx| async move { + let response = rpc + .request(proto::SaveBuffer { + project_id, + buffer_id, + new_path, + version: serialize_version(&version), + }) + .await?; + let version = deserialize_version(&response.version); + let mtime = response.mtime.map(|mtime| mtime.into()); + + buffer_handle.update(&mut cx, |buffer, cx| { + buffer.did_save(version.clone(), mtime, cx); + })?; + + Ok(()) + }) + } + + pub fn handle_create_buffer_for_peer( + &mut self, + envelope: TypedEnvelope, + replica_id: u16, + capability: Capability, + cx: &mut ModelContext, + ) -> Result>> { + match envelope + .payload + .variant + .ok_or_else(|| anyhow!("missing variant"))? + { + proto::create_buffer_for_peer::Variant::State(mut state) => { + let buffer_id = BufferId::new(state.id)?; + + let buffer_result = maybe!({ + let mut buffer_file = None; + if let Some(file) = state.file.take() { + let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id); + let worktree = self + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .ok_or_else(|| { + anyhow!("no worktree found for id {}", file.worktree_id) + })?; + buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?) 
+ as Arc); + } + Buffer::from_proto(replica_id, capability, state, buffer_file) + }); + + match buffer_result { + Ok(buffer) => { + let buffer = cx.new_model(|_| buffer); + self.loading_remote_buffers_by_id.insert(buffer_id, buffer); + } + Err(error) => { + if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) { + for listener in listeners { + listener.send(Err(anyhow!(error.cloned()))).ok(); + } + } + } + } + } + proto::create_buffer_for_peer::Variant::Chunk(chunk) => { + let buffer_id = BufferId::new(chunk.buffer_id)?; + let buffer = self + .loading_remote_buffers_by_id + .get(&buffer_id) + .cloned() + .ok_or_else(|| { + anyhow!( + "received chunk for buffer {} without initial state", + chunk.buffer_id + ) + })?; + + let result = maybe!({ + let operations = chunk + .operations + .into_iter() + .map(language::proto::deserialize_operation) + .collect::>>()?; + buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)); + anyhow::Ok(()) + }); + + if let Err(error) = result { + self.loading_remote_buffers_by_id.remove(&buffer_id); + if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) { + for listener in listeners { + listener.send(Err(error.cloned())).ok(); + } + } + } else if chunk.is_last { + self.loading_remote_buffers_by_id.remove(&buffer_id); + if self.upstream_client.is_via_collab() { + // retain buffers sent by peers to avoid races. 
+ self.shared_with_me.insert(buffer.clone()); + } + + if let Some(senders) = self.remote_buffer_listeners.remove(&buffer_id) { + for sender in senders { + sender.send(Ok(buffer.clone())).ok(); + } + } + return Ok(Some(buffer)); + } + } + } + return Ok(None); + } + + pub fn incomplete_buffer_ids(&self) -> Vec { + self.loading_remote_buffers_by_id + .keys() + .copied() + .collect::>() + } + + pub fn deserialize_project_transaction( + &self, + message: proto::ProjectTransaction, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + cx.spawn(|this, mut cx| async move { + let mut project_transaction = ProjectTransaction::default(); + for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions) + { + let buffer_id = BufferId::new(buffer_id)?; + let buffer = this + .update(&mut cx, |this, cx| { + this.wait_for_remote_buffer(buffer_id, cx) + })? + .await?; + let transaction = language::proto::deserialize_transaction(transaction)?; + project_transaction.0.insert(buffer, transaction); + } + + for (buffer, transaction) in &project_transaction.0 { + buffer + .update(&mut cx, |buffer, _| { + buffer.wait_for_edits(transaction.edit_ids.iter().copied()) + })? 
+ .await?; + + if push_to_history { + buffer.update(&mut cx, |buffer, _| { + buffer.push_transaction(transaction.clone(), Instant::now()); + })?; + } + } + + Ok(project_transaction) + }) + } } -#[derive(Default, Debug)] -pub struct ProjectTransaction(pub HashMap, language::Transaction>); +impl BufferStoreImpl for Model { + fn as_remote(&self) -> Option> { + Some(self.clone()) + } + + fn as_local(&self) -> Option> { + None + } + + fn save_buffer( + &self, + buffer: Model, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + this.save_remote_buffer(buffer.clone(), None, cx) + }) + } + fn save_buffer_as( + &self, + buffer: Model, + path: ProjectPath, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + this.save_remote_buffer(buffer, Some(path.to_proto()), cx) + }) + } + + fn open_buffer( + &self, + path: Arc, + worktree: Model, + cx: &mut ModelContext, + ) -> Task>> { + self.update(cx, |this, cx| { + let worktree_id = worktree.read(cx).id().to_proto(); + let project_id = this.project_id; + let client = this.upstream_client.clone(); + let path_string = path.clone().to_string_lossy().to_string(); + cx.spawn(move |this, mut cx| async move { + let response = client + .request(proto::OpenBufferByPath { + project_id, + worktree_id, + path: path_string, + }) + .await?; + let buffer_id = BufferId::new(response.buffer_id)?; + + let buffer = this + .update(&mut cx, { + |this, cx| this.wait_for_remote_buffer(buffer_id, cx) + })? + .await?; + + Ok(buffer) + }) + }) + } + + fn create_buffer(&self, cx: &mut ModelContext) -> Task>> { + self.update(cx, |this, cx| { + let create = this.upstream_client.request(proto::OpenNewBuffer { + project_id: this.project_id, + }); + cx.spawn(|this, mut cx| async move { + let response = create.await?; + let buffer_id = BufferId::new(response.buffer_id)?; + + this.update(&mut cx, |this, cx| { + this.wait_for_remote_buffer(buffer_id, cx) + })? 
+ .await + }) + }) + } + + fn reload_buffers( + &self, + buffers: Vec>, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + let request = this.upstream_client.request(proto::ReloadBuffers { + project_id: this.project_id, + buffer_ids: buffers + .iter() + .map(|buffer| buffer.read(cx).remote_id().to_proto()) + .collect(), + }); + + cx.spawn(|this, mut cx| async move { + let response = request + .await? + .transaction + .ok_or_else(|| anyhow!("missing transaction"))?; + this.update(&mut cx, |this, cx| { + this.deserialize_project_transaction(response, push_to_history, cx) + })? + .await + }) + }) + } +} + +impl LocalBufferStore { + fn save_local_buffer( + &self, + buffer_handle: Model, + worktree: Model, + path: Arc, + mut has_changed_file: bool, + cx: &mut ModelContext, + ) -> Task> { + let buffer = buffer_handle.read(cx); + + let text = buffer.as_rope().clone(); + let line_ending = buffer.line_ending(); + let version = buffer.version(); + let buffer_id = buffer.remote_id(); + if buffer.file().is_some_and(|file| !file.is_created()) { + has_changed_file = true; + } + + let save = worktree.update(cx, |worktree, cx| { + worktree.write_file(path.as_ref(), text, line_ending, cx) + }); + + cx.spawn(move |this, mut cx| async move { + let new_file = save.await?; + let mtime = new_file.mtime; + this.update(&mut cx, |this, cx| { + if let Some((downstream_client, project_id)) = this.downstream_client(cx) { + if has_changed_file { + downstream_client + .send(proto::UpdateBufferFile { + project_id, + buffer_id: buffer_id.to_proto(), + file: Some(language::File::to_proto(&*new_file, cx)), + }) + .log_err(); + } + downstream_client + .send(proto::BufferSaved { + project_id, + buffer_id: buffer_id.to_proto(), + version: serialize_version(&version), + mtime: mtime.map(|time| time.into()), + }) + .log_err(); + } + })?; + buffer_handle.update(&mut cx, |buffer, cx| { + if has_changed_file { + buffer.file_updated(new_file, cx); + } + 
buffer.did_save(version.clone(), mtime, cx); + }) + }) + } + + fn subscribe_to_worktree(&mut self, worktree: &Model, cx: &mut ModelContext) { + cx.subscribe(worktree, |this, worktree, event, cx| { + if worktree.read(cx).is_local() { + match event { + worktree::Event::UpdatedEntries(changes) => { + this.local_worktree_entries_changed(&worktree, changes, cx); + } + worktree::Event::UpdatedGitRepositories(updated_repos) => { + this.local_worktree_git_repos_changed(worktree.clone(), updated_repos, cx) + } + _ => {} + } + } + }) + .detach(); + } + + fn local_worktree_entries_changed( + &mut self, + worktree_handle: &Model, + changes: &[(Arc, ProjectEntryId, PathChange)], + cx: &mut ModelContext, + ) { + let snapshot = worktree_handle.read(cx).snapshot(); + for (path, entry_id, _) in changes { + self.local_worktree_entry_changed(*entry_id, path, worktree_handle, &snapshot, cx); + } + } + + fn local_worktree_git_repos_changed( + &mut self, + worktree_handle: Model, + changed_repos: &UpdatedGitRepositoriesSet, + cx: &mut ModelContext, + ) { + debug_assert!(worktree_handle.read(cx).is_local()); + let Some(buffer_store) = self.buffer_store.upgrade() else { + return; + }; + + // Identify the loading buffers whose containing repository that has changed. + let (future_buffers, current_buffers) = buffer_store.update(cx, |buffer_store, cx| { + let future_buffers = buffer_store + .loading_buffers() + .filter_map(|(project_path, receiver)| { + if project_path.worktree_id != worktree_handle.read(cx).id() { + return None; + } + let path = &project_path.path; + changed_repos + .iter() + .find(|(work_dir, _)| path.starts_with(work_dir))?; + let path = path.clone(); + Some(async move { + BufferStore::wait_for_loading_buffer(receiver) + .await + .ok() + .map(|buffer| (buffer, path)) + }) + }) + .collect::>(); + + // Identify the current buffers whose containing repository has changed. 
+ let current_buffers = buffer_store + .buffers() + .filter_map(|buffer| { + let file = File::from_dyn(buffer.read(cx).file())?; + if file.worktree != worktree_handle { + return None; + } + changed_repos + .iter() + .find(|(work_dir, _)| file.path.starts_with(work_dir))?; + Some((buffer, file.path.clone())) + }) + .collect::>(); + (future_buffers, current_buffers) + }); + + if future_buffers.len() + current_buffers.len() == 0 { + return; + } + + cx.spawn(move |this, mut cx| async move { + // Wait for all of the buffers to load. + let future_buffers = future_buffers.collect::>().await; + + // Reload the diff base for every buffer whose containing git repository has changed. + let snapshot = + worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?; + let diff_bases_by_buffer = cx + .background_executor() + .spawn(async move { + let mut diff_base_tasks = future_buffers + .into_iter() + .flatten() + .chain(current_buffers) + .filter_map(|(buffer, path)| { + let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?; + let relative_path = repo_entry.relativize(&snapshot, &path).ok()?; + Some(async move { + let base_text = + local_repo_entry.repo().load_index_text(&relative_path); + Some((buffer, base_text)) + }) + }) + .collect::>(); + + let mut diff_bases = Vec::with_capacity(diff_base_tasks.len()); + while let Some(diff_base) = diff_base_tasks.next().await { + if let Some(diff_base) = diff_base { + diff_bases.push(diff_base); + } + } + diff_bases + }) + .await; + + this.update(&mut cx, |this, cx| { + // Assign the new diff bases on all of the buffers. 
+ for (buffer, diff_base) in diff_bases_by_buffer { + let buffer_id = buffer.update(cx, |buffer, cx| { + buffer.set_diff_base(diff_base.clone(), cx); + buffer.remote_id().to_proto() + }); + if let Some((client, project_id)) = &this.downstream_client(cx) { + client + .send(proto::UpdateDiffBase { + project_id: *project_id, + buffer_id, + diff_base, + }) + .log_err(); + } + } + }) + }) + .detach_and_log_err(cx); + } + + fn local_worktree_entry_changed( + &mut self, + entry_id: ProjectEntryId, + path: &Arc, + worktree: &Model, + snapshot: &worktree::Snapshot, + cx: &mut ModelContext, + ) -> Option<()> { + let project_path = ProjectPath { + worktree_id: snapshot.id(), + path: path.clone(), + }; + let buffer_id = match self.local_buffer_ids_by_entry_id.get(&entry_id) { + Some(&buffer_id) => buffer_id, + None => self.local_buffer_ids_by_path.get(&project_path).copied()?, + }; + let buffer = self + .buffer_store + .update(cx, |buffer_store, _| { + if let Some(buffer) = buffer_store.get(buffer_id) { + Some(buffer) + } else { + buffer_store.opened_buffers.remove(&buffer_id); + None + } + }) + .ok() + .flatten(); + let buffer = if let Some(buffer) = buffer { + buffer + } else { + self.local_buffer_ids_by_path.remove(&project_path); + self.local_buffer_ids_by_entry_id.remove(&entry_id); + return None; + }; + + let events = buffer.update(cx, |buffer, cx| { + let file = buffer.file()?; + let old_file = File::from_dyn(Some(file))?; + if old_file.worktree != *worktree { + return None; + } + + let new_file = if let Some(entry) = old_file + .entry_id + .and_then(|entry_id| snapshot.entry_for_id(entry_id)) + { + File { + is_local: true, + entry_id: Some(entry.id), + mtime: entry.mtime, + path: entry.path.clone(), + worktree: worktree.clone(), + is_deleted: false, + is_private: entry.is_private, + } + } else if let Some(entry) = snapshot.entry_for_path(old_file.path.as_ref()) { + File { + is_local: true, + entry_id: Some(entry.id), + mtime: entry.mtime, + path: entry.path.clone(), + 
worktree: worktree.clone(), + is_deleted: false, + is_private: entry.is_private, + } + } else { + File { + is_local: true, + entry_id: old_file.entry_id, + path: old_file.path.clone(), + mtime: old_file.mtime, + worktree: worktree.clone(), + is_deleted: true, + is_private: old_file.is_private, + } + }; + + if new_file == *old_file { + return None; + } + + let mut events = Vec::new(); + if new_file.path != old_file.path { + self.local_buffer_ids_by_path.remove(&ProjectPath { + path: old_file.path.clone(), + worktree_id: old_file.worktree_id(cx), + }); + self.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: new_file.worktree_id(cx), + path: new_file.path.clone(), + }, + buffer_id, + ); + events.push(BufferStoreEvent::BufferChangedFilePath { + buffer: cx.handle(), + old_file: buffer.file().cloned(), + }); + } + + if new_file.entry_id != old_file.entry_id { + if let Some(entry_id) = old_file.entry_id { + self.local_buffer_ids_by_entry_id.remove(&entry_id); + } + if let Some(entry_id) = new_file.entry_id { + self.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); + } + } + + if let Some((client, project_id)) = &self.downstream_client(cx) { + client + .send(proto::UpdateBufferFile { + project_id: *project_id, + buffer_id: buffer_id.to_proto(), + file: Some(new_file.to_proto(cx)), + }) + .ok(); + } + + buffer.file_updated(Arc::new(new_file), cx); + Some(events) + })?; + self.buffer_store + .update(cx, |_buffer_store, cx| { + for event in events { + cx.emit(event); + } + }) + .log_err()?; + + None + } + + fn downstream_client(&self, cx: &AppContext) -> Option<(AnyProtoClient, u64)> { + self.buffer_store + .upgrade()? 
+ .read(cx) + .downstream_client + .clone() + } + + fn buffer_changed_file(&mut self, buffer: Model, cx: &mut AppContext) -> Option<()> { + let file = File::from_dyn(buffer.read(cx).file())?; + + let remote_id = buffer.read(cx).remote_id(); + if let Some(entry_id) = file.entry_id { + match self.local_buffer_ids_by_entry_id.get(&entry_id) { + Some(_) => { + return None; + } + None => { + self.local_buffer_ids_by_entry_id + .insert(entry_id, remote_id); + } + } + }; + self.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + remote_id, + ); + + Some(()) + } +} + +impl BufferStoreImpl for Model { + fn as_remote(&self) -> Option> { + None + } + + fn as_local(&self) -> Option> { + Some(self.clone()) + } + + fn save_buffer( + &self, + buffer: Model, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + let Some(file) = File::from_dyn(buffer.read(cx).file()) else { + return Task::ready(Err(anyhow!("buffer doesn't have a file"))); + }; + let worktree = file.worktree.clone(); + this.save_local_buffer(buffer, worktree, file.path.clone(), false, cx) + }) + } + + fn save_buffer_as( + &self, + buffer: Model, + path: ProjectPath, + cx: &mut ModelContext, + ) -> Task> { + self.update(cx, |this, cx| { + let Some(worktree) = this + .worktree_store + .read(cx) + .worktree_for_id(path.worktree_id, cx) + else { + return Task::ready(Err(anyhow!("no such worktree"))); + }; + this.save_local_buffer(buffer, worktree, path.path.clone(), true, cx) + }) + } + + fn open_buffer( + &self, + path: Arc, + worktree: Model, + cx: &mut ModelContext, + ) -> Task>> { + let buffer_store = cx.weak_model(); + self.update(cx, |_, cx| { + let load_buffer = worktree.update(cx, |worktree, cx| { + let load_file = worktree.load_file(path.as_ref(), cx); + let reservation = cx.reserve_model(); + let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); + cx.spawn(move |_, mut cx| async move { + let loaded = 
load_file.await?; + let text_buffer = cx + .background_executor() + .spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) }) + .await; + cx.insert_model(reservation, |_| { + Buffer::build( + text_buffer, + loaded.diff_base, + Some(loaded.file), + Capability::ReadWrite, + ) + }) + }) + }); + + cx.spawn(move |this, mut cx| async move { + let buffer = match load_buffer.await { + Ok(buffer) => Ok(buffer), + Err(error) if is_not_found_error(&error) => cx.new_model(|cx| { + let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); + let text_buffer = text::Buffer::new(0, buffer_id, "".into()); + Buffer::build( + text_buffer, + None, + Some(Arc::new(File { + worktree, + path, + mtime: None, + entry_id: None, + is_local: true, + is_deleted: false, + is_private: false, + })), + Capability::ReadWrite, + ) + }), + Err(e) => Err(e), + }?; + this.update(&mut cx, |this, cx| { + buffer_store.update(cx, |buffer_store, cx| { + buffer_store.add_buffer(buffer.clone(), cx) + })??; + let buffer_id = buffer.read(cx).remote_id(); + if let Some(file) = File::from_dyn(buffer.read(cx).file()) { + this.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + buffer_id, + ); + + if let Some(entry_id) = file.entry_id { + this.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); + } + } + + anyhow::Ok(()) + })??; + + Ok(buffer) + }) + }) + } + + fn create_buffer(&self, cx: &mut ModelContext) -> Task>> { + let handle = self.clone(); + cx.spawn(|buffer_store, mut cx| async move { + let buffer = cx.new_model(|cx| { + Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx) + })?; + buffer_store.update(&mut cx, |buffer_store, cx| { + buffer_store.add_buffer(buffer.clone(), cx).log_err(); + let buffer_id = buffer.read(cx).remote_id(); + handle.update(cx, |this, cx| { + if let Some(file) = File::from_dyn(buffer.read(cx).file()) { + this.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: 
file.worktree_id(cx), + path: file.path.clone(), + }, + buffer_id, + ); + + if let Some(entry_id) = file.entry_id { + this.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); + } + } + }); + })?; + Ok(buffer) + }) + } + + fn reload_buffers( + &self, + buffers: Vec>, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + cx.spawn(move |_, mut cx| async move { + let mut project_transaction = ProjectTransaction::default(); + for buffer in buffers { + let transaction = buffer + .update(&mut cx, |buffer, cx| buffer.reload(cx))? + .await?; + buffer.update(&mut cx, |buffer, cx| { + if let Some(transaction) = transaction { + if !push_to_history { + buffer.forget_transaction(transaction.id); + } + project_transaction.0.insert(cx.handle(), transaction); + } + })?; + } -impl EventEmitter for BufferStore {} + Ok(project_transaction) + }) + } +} impl BufferStore { pub fn init(client: &AnyProtoClient) { @@ -90,24 +975,31 @@ impl BufferStore { /// Creates a buffer store, optionally retaining its buffers. 
pub fn local(worktree_store: Model, cx: &mut ModelContext) -> Self { - cx.subscribe(&worktree_store, |this, _, event, cx| { - if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { - this.subscribe_to_worktree(worktree, cx); - } - }) - .detach(); - + let this = cx.weak_model(); Self { - state: BufferStoreState::Local {}, + state: Box::new(cx.new_model(|cx| { + let subscription = cx.subscribe( + &worktree_store, + |this: &mut LocalBufferStore, _, event, cx| { + if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { + this.subscribe_to_worktree(worktree, cx); + } + }, + ); + + LocalBufferStore { + local_buffer_ids_by_path: Default::default(), + local_buffer_ids_by_entry_id: Default::default(), + buffer_store: this, + worktree_store: worktree_store.clone(), + _subscription: subscription, + } + })), downstream_client: None, - worktree_store, opened_buffers: Default::default(), - remote_buffer_listeners: Default::default(), - loading_remote_buffers_by_id: Default::default(), - local_buffer_ids_by_path: Default::default(), - local_buffer_ids_by_entry_id: Default::default(), - loading_buffers_by_path: Default::default(), shared_buffers: Default::default(), + loading_buffers_by_path: Default::default(), + worktree_store, } } @@ -117,28 +1009,22 @@ impl BufferStore { remote_id: u64, cx: &mut ModelContext, ) -> Self { - cx.subscribe(&worktree_store, |this, _, event, cx| { - if let WorktreeStoreEvent::WorktreeAdded(worktree) = event { - this.subscribe_to_worktree(worktree, cx); - } - }) - .detach(); - + let this = cx.weak_model(); Self { - state: BufferStoreState::Remote { + state: Box::new(cx.new_model(|_| RemoteBufferStore { shared_with_me: Default::default(), - upstream_client, + loading_remote_buffers_by_id: Default::default(), + remote_buffer_listeners: Default::default(), project_id: remote_id, - }, + upstream_client, + worktree_store: worktree_store.clone(), + buffer_store: this, + })), downstream_client: None, - worktree_store, opened_buffers: 
Default::default(), - remote_buffer_listeners: Default::default(), - loading_remote_buffers_by_id: Default::default(), - local_buffer_ids_by_path: Default::default(), - local_buffer_ids_by_entry_id: Default::default(), loading_buffers_by_path: Default::default(), shared_buffers: Default::default(), + worktree_store, } } @@ -171,18 +1057,13 @@ impl BufferStore { entry.insert(rx.clone()); let project_path = project_path.clone(); - let load_buffer = match worktree.read(cx) { - Worktree::Local(_) => { - self.open_local_buffer_internal(project_path.path.clone(), worktree, cx) - } - Worktree::Remote(tree) => { - self.open_remote_buffer_internal(&project_path.path, tree, cx) - } - }; + let load_buffer = self + .state + .open_buffer(project_path.path.clone(), worktree, cx); cx.spawn(move |this, mut cx| async move { let load_result = load_buffer.await; - *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| { + *tx.borrow_mut() = Some(this.update(&mut cx, |this, _cx| { // Record the fact that the buffer is no longer loading. 
this.loading_buffers_by_path.remove(&project_path); let buffer = load_result.map_err(Arc::new)?; @@ -201,391 +1082,32 @@ impl BufferStore { .map_err(|e| e.cloned()) }) } - - fn subscribe_to_worktree(&mut self, worktree: &Model, cx: &mut ModelContext) { - cx.subscribe(worktree, |this, worktree, event, cx| { - if worktree.read(cx).is_local() { - match event { - worktree::Event::UpdatedEntries(changes) => { - this.local_worktree_entries_changed(&worktree, changes, cx); - } - worktree::Event::UpdatedGitRepositories(updated_repos) => { - this.local_worktree_git_repos_changed(worktree.clone(), updated_repos, cx) - } - _ => {} - } - } - }) - .detach(); - } - - fn local_worktree_entries_changed( - &mut self, - worktree_handle: &Model, - changes: &[(Arc, ProjectEntryId, PathChange)], - cx: &mut ModelContext, - ) { - let snapshot = worktree_handle.read(cx).snapshot(); - for (path, entry_id, _) in changes { - self.local_worktree_entry_changed(*entry_id, path, worktree_handle, &snapshot, cx); - } - } - - fn local_worktree_git_repos_changed( - &mut self, - worktree_handle: Model, - changed_repos: &UpdatedGitRepositoriesSet, - cx: &mut ModelContext, - ) { - debug_assert!(worktree_handle.read(cx).is_local()); - - // Identify the loading buffers whose containing repository that has changed. - let future_buffers = self - .loading_buffers() - .filter_map(|(project_path, receiver)| { - if project_path.worktree_id != worktree_handle.read(cx).id() { - return None; - } - let path = &project_path.path; - changed_repos - .iter() - .find(|(work_dir, _)| path.starts_with(work_dir))?; - let path = path.clone(); - Some(async move { - Self::wait_for_loading_buffer(receiver) - .await - .ok() - .map(|buffer| (buffer, path)) - }) - }) - .collect::>(); - - // Identify the current buffers whose containing repository has changed. 
- let current_buffers = self - .buffers() - .filter_map(|buffer| { - let file = File::from_dyn(buffer.read(cx).file())?; - if file.worktree != worktree_handle { - return None; - } - changed_repos - .iter() - .find(|(work_dir, _)| file.path.starts_with(work_dir))?; - Some((buffer, file.path.clone())) - }) - .collect::>(); - - if future_buffers.len() + current_buffers.len() == 0 { - return; - } - - cx.spawn(move |this, mut cx| async move { - // Wait for all of the buffers to load. - let future_buffers = future_buffers.collect::>().await; - - // Reload the diff base for every buffer whose containing git repository has changed. - let snapshot = - worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?; - let diff_bases_by_buffer = cx - .background_executor() - .spawn(async move { - let mut diff_base_tasks = future_buffers - .into_iter() - .flatten() - .chain(current_buffers) - .filter_map(|(buffer, path)| { - let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?; - let relative_path = repo_entry.relativize(&snapshot, &path).ok()?; - Some(async move { - let base_text = - local_repo_entry.repo().load_index_text(&relative_path); - Some((buffer, base_text)) - }) - }) - .collect::>(); - - let mut diff_bases = Vec::with_capacity(diff_base_tasks.len()); - while let Some(diff_base) = diff_base_tasks.next().await { - if let Some(diff_base) = diff_base { - diff_bases.push(diff_base); - } - } - diff_bases - }) - .await; - - this.update(&mut cx, |this, cx| { - // Assign the new diff bases on all of the buffers. 
- for (buffer, diff_base) in diff_bases_by_buffer { - let buffer_id = buffer.update(cx, |buffer, cx| { - buffer.set_diff_base(diff_base.clone(), cx); - buffer.remote_id().to_proto() - }); - if let Some((client, project_id)) = &this.downstream_client { - client - .send(proto::UpdateDiffBase { - project_id: *project_id, - buffer_id, - diff_base, - }) - .log_err(); - } - } - }) - }) - .detach_and_log_err(cx); - } - - fn open_local_buffer_internal( - &mut self, - path: Arc, - worktree: Model, - cx: &mut ModelContext, - ) -> Task>> { - let load_buffer = worktree.update(cx, |worktree, cx| { - let load_file = worktree.load_file(path.as_ref(), cx); - let reservation = cx.reserve_model(); - let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); - cx.spawn(move |_, mut cx| async move { - let loaded = load_file.await?; - let text_buffer = cx - .background_executor() - .spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) }) - .await; - cx.insert_model(reservation, |_| { - Buffer::build( - text_buffer, - loaded.diff_base, - Some(loaded.file), - Capability::ReadWrite, - ) - }) - }) - }); - - cx.spawn(move |this, mut cx| async move { - let buffer = match load_buffer.await { - Ok(buffer) => Ok(buffer), - Err(error) if is_not_found_error(&error) => cx.new_model(|cx| { - let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); - let text_buffer = text::Buffer::new(0, buffer_id, "".into()); - Buffer::build( - text_buffer, - None, - Some(Arc::new(File { - worktree, - path, - mtime: None, - entry_id: None, - is_local: true, - is_deleted: false, - is_private: false, - })), - Capability::ReadWrite, - ) - }), - Err(e) => Err(e), - }?; - this.update(&mut cx, |this, cx| { - this.add_buffer(buffer.clone(), cx).log_err(); - })?; - Ok(buffer) - }) - } - - fn open_remote_buffer_internal( - &self, - path: &Arc, - worktree: &RemoteWorktree, - cx: &ModelContext, - ) -> Task>> { - let worktree_id = worktree.id().to_proto(); - let project_id = 
worktree.project_id(); - let client = worktree.client(); - let path_string = path.clone().to_string_lossy().to_string(); - cx.spawn(move |this, mut cx| async move { - let response = client - .request(proto::OpenBufferByPath { - project_id, - worktree_id, - path: path_string, - }) - .await?; - let buffer_id = BufferId::new(response.buffer_id)?; - this.update(&mut cx, |this, cx| { - this.wait_for_remote_buffer(buffer_id, cx) - })? - .await - }) - } - - pub fn create_buffer( - &mut self, - remote_client: Option<(AnyProtoClient, u64)>, - cx: &mut ModelContext, - ) -> Task>> { - if let Some((remote_client, project_id)) = remote_client { - let create = remote_client.request(proto::OpenNewBuffer { project_id }); - cx.spawn(|this, mut cx| async move { - let response = create.await?; - let buffer_id = BufferId::new(response.buffer_id)?; - - this.update(&mut cx, |this, cx| { - this.wait_for_remote_buffer(buffer_id, cx) - })? - .await - }) - } else { - Task::ready(Ok(self.create_local_buffer("", None, cx))) - } - } - - pub fn create_local_buffer( - &mut self, - text: &str, - language: Option>, - cx: &mut ModelContext, - ) -> Model { - let buffer = cx.new_model(|cx| { - Buffer::local(text, cx) - .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx) - }); - self.add_buffer(buffer.clone(), cx).log_err(); - buffer - } - - pub fn save_buffer( - &mut self, - buffer: Model, - cx: &mut ModelContext, - ) -> Task> { - let Some(file) = File::from_dyn(buffer.read(cx).file()) else { - return Task::ready(Err(anyhow!("buffer doesn't have a file"))); - }; - match file.worktree.read(cx) { - Worktree::Local(_) => { - self.save_local_buffer(file.worktree.clone(), buffer, file.path.clone(), false, cx) - } - Worktree::Remote(tree) => self.save_remote_buffer(buffer, None, tree, cx), - } - } - - pub fn save_buffer_as( - &mut self, - buffer: Model, - path: ProjectPath, - cx: &mut ModelContext, - ) -> Task> { - let Some(worktree) = self - .worktree_store - .read(cx) - 
.worktree_for_id(path.worktree_id, cx) - else { - return Task::ready(Err(anyhow!("no such worktree"))); - }; - - let old_file = buffer.read(cx).file().cloned(); - - let task = match worktree.read(cx) { - Worktree::Local(_) => { - self.save_local_buffer(worktree, buffer.clone(), path.path, true, cx) - } - Worktree::Remote(tree) => { - self.save_remote_buffer(buffer.clone(), Some(path.to_proto()), tree, cx) - } - }; - cx.spawn(|this, mut cx| async move { - task.await?; - this.update(&mut cx, |_, cx| { - cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file }); - }) - }) + + pub fn create_buffer(&mut self, cx: &mut ModelContext) -> Task>> { + self.state.create_buffer(cx) } - fn save_local_buffer( - &self, - worktree: Model, - buffer_handle: Model, - path: Arc, - mut has_changed_file: bool, + pub fn save_buffer( + &mut self, + buffer: Model, cx: &mut ModelContext, ) -> Task> { - let buffer = buffer_handle.read(cx); - let text = buffer.as_rope().clone(); - let line_ending = buffer.line_ending(); - let version = buffer.version(); - let buffer_id = buffer.remote_id(); - if buffer.file().is_some_and(|file| !file.is_created()) { - has_changed_file = true; - } - - let save = worktree.update(cx, |worktree, cx| { - worktree.write_file(path.as_ref(), text, line_ending, cx) - }); - - cx.spawn(move |this, mut cx| async move { - let new_file = save.await?; - let mtime = new_file.mtime; - this.update(&mut cx, |this, cx| { - if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { - let project_id = *project_id; - if has_changed_file { - downstream_client - .send(proto::UpdateBufferFile { - project_id, - buffer_id: buffer_id.to_proto(), - file: Some(language::File::to_proto(&*new_file, cx)), - }) - .log_err(); - } - downstream_client - .send(proto::BufferSaved { - project_id, - buffer_id: buffer_id.to_proto(), - version: serialize_version(&version), - mtime: mtime.map(|time| time.into()), - }) - .log_err(); - } - })?; - buffer_handle.update(&mut 
cx, |buffer, cx| { - if has_changed_file { - buffer.file_updated(new_file, cx); - } - buffer.did_save(version.clone(), mtime, cx); - }) - }) + self.state.save_buffer(buffer, cx) } - fn save_remote_buffer( - &self, - buffer_handle: Model, - new_path: Option, - tree: &RemoteWorktree, - cx: &ModelContext, + pub fn save_buffer_as( + &mut self, + buffer: Model, + path: ProjectPath, + cx: &mut ModelContext, ) -> Task> { - let buffer = buffer_handle.read(cx); - let buffer_id = buffer.remote_id().into(); - let version = buffer.version(); - let rpc = tree.client(); - let project_id = tree.project_id(); - cx.spawn(move |_, mut cx| async move { - let response = rpc - .request(proto::SaveBuffer { - project_id, - buffer_id, - new_path, - version: serialize_version(&version), - }) - .await?; - let version = deserialize_version(&response.version); - let mtime = response.mtime.map(|mtime| mtime.into()); - - buffer_handle.update(&mut cx, |buffer, cx| { - buffer.did_save(version.clone(), mtime, cx); - })?; - - Ok(()) + let old_file = buffer.read(cx).file().cloned(); + let task = self.state.save_buffer_as(buffer.clone(), path, cx); + cx.spawn(|this, mut cx| async move { + task.await?; + this.update(&mut cx, |_, cx| { + cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file }); + }) }) } @@ -684,29 +1206,6 @@ impl BufferStore { } } - if let Some(senders) = self.remote_buffer_listeners.remove(&remote_id) { - for sender in senders { - sender.send(Ok(buffer.clone())).ok(); - } - } - - if let Some(file) = File::from_dyn(buffer.read(cx).file()) { - if file.is_local { - self.local_buffer_ids_by_path.insert( - ProjectPath { - worktree_id: file.worktree_id(cx), - path: file.path.clone(), - }, - remote_id, - ); - - if let Some(entry_id) = file.entry_id { - self.local_buffer_ids_by_entry_id - .insert(entry_id, remote_id); - } - } - } - cx.subscribe(&buffer, Self::on_buffer_event).detach(); cx.emit(BufferStoreEvent::BufferAdded(buffer)); Ok(()) @@ -753,23 +1252,20 @@ impl BufferStore 
{ .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id)) } - pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option> { - self.get(buffer_id) - .or_else(|| self.loading_remote_buffers_by_id.get(&buffer_id).cloned()) - } - - pub fn wait_for_remote_buffer( - &mut self, - id: BufferId, - cx: &mut AppContext, - ) -> Task>> { - let buffer = self.get(id); - if let Some(buffer) = buffer { - return Task::ready(Ok(buffer)); - } - let (tx, rx) = oneshot::channel(); - self.remote_buffer_listeners.entry(id).or_default().push(tx); - cx.background_executor().spawn(async move { rx.await? }) + pub fn get_possibly_incomplete( + &self, + buffer_id: BufferId, + cx: &AppContext, + ) -> Option> { + self.get(buffer_id).or_else(|| { + self.state.as_remote().and_then(|remote| { + remote + .read(cx) + .loading_remote_buffers_by_id + .get(&buffer_id) + .cloned() + }) + }) } pub fn buffer_version_info( @@ -787,15 +1283,19 @@ impl BufferStore { }) .collect(); let incomplete_buffer_ids = self - .loading_remote_buffers_by_id - .keys() - .copied() - .collect::>(); + .state + .as_remote() + .map(|remote| remote.read(cx).incomplete_buffer_ids()) + .unwrap_or_default(); (buffers, incomplete_buffer_ids) } pub fn disconnected_from_host(&mut self, cx: &mut AppContext) { - self.drop_unnecessary_buffers(cx); + for open_buffer in self.opened_buffers.values_mut() { + if let Some(buffer) = open_buffer.upgrade() { + buffer.update(cx, |buffer, _| buffer.give_up_waiting()); + } + } for buffer in self.buffers() { buffer.update(cx, |buffer, cx| { @@ -803,9 +1303,13 @@ impl BufferStore { }); } - // Wake up all futures currently waiting on a buffer to get opened, - // to give them a chance to fail now that we've disconnected. - self.remote_buffer_listeners.clear(); + if let Some(remote) = self.state.as_remote() { + remote.update(cx, |remote, _| { + // Wake up all futures currently waiting on a buffer to get opened, + // to give them a chance to fail now that we've disconnected. 
+ remote.remote_buffer_listeners.clear() + }) + } } pub fn shared( @@ -822,14 +1326,6 @@ impl BufferStore { self.forget_shared_buffers(); } - fn drop_unnecessary_buffers(&mut self, cx: &mut AppContext) { - for open_buffer in self.opened_buffers.values_mut() { - if let Some(buffer) = open_buffer.upgrade() { - buffer.update(cx, |buffer, _| buffer.give_up_waiting()); - } - } - } - pub fn discard_incomplete(&mut self) { self.opened_buffers .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_))); @@ -897,7 +1393,11 @@ impl BufferStore { ) { match event { BufferEvent::FileHandleChanged => { - self.buffer_changed_file(buffer, cx); + if let Some(local) = self.state.as_local() { + local.update(cx, |local, cx| { + local.buffer_changed_file(buffer, cx); + }) + } } BufferEvent::Reloaded => { let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else { @@ -905,164 +1405,17 @@ impl BufferStore { }; let buffer = buffer.read(cx); downstream_client - .send(proto::BufferReloaded { - project_id: *project_id, - buffer_id: buffer.remote_id().to_proto(), - version: serialize_version(&buffer.version()), - mtime: buffer.saved_mtime().map(|t| t.into()), - line_ending: serialize_line_ending(buffer.line_ending()) as i32, - }) - .log_err(); - } - _ => {} - } - } - - fn local_worktree_entry_changed( - &mut self, - entry_id: ProjectEntryId, - path: &Arc, - worktree: &Model, - snapshot: &worktree::Snapshot, - cx: &mut ModelContext, - ) -> Option<()> { - let project_path = ProjectPath { - worktree_id: snapshot.id(), - path: path.clone(), - }; - let buffer_id = match self.local_buffer_ids_by_entry_id.get(&entry_id) { - Some(&buffer_id) => buffer_id, - None => self.local_buffer_ids_by_path.get(&project_path).copied()?, - }; - let buffer = if let Some(buffer) = self.get(buffer_id) { - buffer - } else { - self.opened_buffers.remove(&buffer_id); - self.local_buffer_ids_by_path.remove(&project_path); - self.local_buffer_ids_by_entry_id.remove(&entry_id); - return None; - 
}; - - let events = buffer.update(cx, |buffer, cx| { - let file = buffer.file()?; - let old_file = File::from_dyn(Some(file))?; - if old_file.worktree != *worktree { - return None; - } - - let new_file = if let Some(entry) = old_file - .entry_id - .and_then(|entry_id| snapshot.entry_for_id(entry_id)) - { - File { - is_local: true, - entry_id: Some(entry.id), - mtime: entry.mtime, - path: entry.path.clone(), - worktree: worktree.clone(), - is_deleted: false, - is_private: entry.is_private, - } - } else if let Some(entry) = snapshot.entry_for_path(old_file.path.as_ref()) { - File { - is_local: true, - entry_id: Some(entry.id), - mtime: entry.mtime, - path: entry.path.clone(), - worktree: worktree.clone(), - is_deleted: false, - is_private: entry.is_private, - } - } else { - File { - is_local: true, - entry_id: old_file.entry_id, - path: old_file.path.clone(), - mtime: old_file.mtime, - worktree: worktree.clone(), - is_deleted: true, - is_private: old_file.is_private, - } - }; - - if new_file == *old_file { - return None; - } - - let mut events = Vec::new(); - if new_file.path != old_file.path { - self.local_buffer_ids_by_path.remove(&ProjectPath { - path: old_file.path.clone(), - worktree_id: old_file.worktree_id(cx), - }); - self.local_buffer_ids_by_path.insert( - ProjectPath { - worktree_id: new_file.worktree_id(cx), - path: new_file.path.clone(), - }, - buffer_id, - ); - events.push(BufferStoreEvent::BufferChangedFilePath { - buffer: cx.handle(), - old_file: buffer.file().cloned(), - }); - } - - if new_file.entry_id != old_file.entry_id { - if let Some(entry_id) = old_file.entry_id { - self.local_buffer_ids_by_entry_id.remove(&entry_id); - } - if let Some(entry_id) = new_file.entry_id { - self.local_buffer_ids_by_entry_id - .insert(entry_id, buffer_id); - } - } - - if let Some((client, project_id)) = &self.downstream_client { - client - .send(proto::UpdateBufferFile { + .send(proto::BufferReloaded { project_id: *project_id, - buffer_id: buffer_id.to_proto(), - 
file: Some(new_file.to_proto(cx)), + buffer_id: buffer.remote_id().to_proto(), + version: serialize_version(&buffer.version()), + mtime: buffer.saved_mtime().map(|t| t.into()), + line_ending: serialize_line_ending(buffer.line_ending()) as i32, }) - .ok(); + .log_err(); } - - buffer.file_updated(Arc::new(new_file), cx); - Some(events) - })?; - - for event in events { - cx.emit(event); + _ => {} } - - None - } - - fn buffer_changed_file(&mut self, buffer: Model, cx: &mut AppContext) -> Option<()> { - let file = File::from_dyn(buffer.read(cx).file())?; - - let remote_id = buffer.read(cx).remote_id(); - if let Some(entry_id) = file.entry_id { - match self.local_buffer_ids_by_entry_id.get(&entry_id) { - Some(_) => { - return None; - } - None => { - self.local_buffer_ids_by_entry_id - .insert(entry_id, remote_id); - } - } - }; - self.local_buffer_ids_by_path.insert( - ProjectPath { - worktree_id: file.worktree_id(cx), - path: file.path.clone(), - }, - remote_id, - ); - - Some(()) } pub async fn handle_update_buffer( @@ -1186,93 +1539,14 @@ impl BufferStore { capability: Capability, cx: &mut ModelContext, ) -> Result<()> { - match envelope - .payload - .variant - .ok_or_else(|| anyhow!("missing variant"))? - { - proto::create_buffer_for_peer::Variant::State(mut state) => { - let buffer_id = BufferId::new(state.id)?; - - let buffer_result = maybe!({ - let mut buffer_file = None; - if let Some(file) = state.file.take() { - let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id); - let worktree = self - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - .ok_or_else(|| { - anyhow!("no worktree found for id {}", file.worktree_id) - })?; - buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?) 
- as Arc); - } - Buffer::from_proto(replica_id, capability, state, buffer_file) - }); - - match buffer_result { - Ok(buffer) => { - let buffer = cx.new_model(|_| buffer); - self.loading_remote_buffers_by_id.insert(buffer_id, buffer); - } - Err(error) => { - if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) { - for listener in listeners { - listener.send(Err(anyhow!(error.cloned()))).ok(); - } - } - } - } - } - proto::create_buffer_for_peer::Variant::Chunk(chunk) => { - let buffer_id = BufferId::new(chunk.buffer_id)?; - let buffer = self - .loading_remote_buffers_by_id - .get(&buffer_id) - .cloned() - .ok_or_else(|| { - anyhow!( - "received chunk for buffer {} without initial state", - chunk.buffer_id - ) - })?; - - let result = maybe!({ - let operations = chunk - .operations - .into_iter() - .map(language::proto::deserialize_operation) - .collect::>>()?; - buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)); - anyhow::Ok(()) - }); + let Some(remote) = self.state.as_remote() else { + return Err(anyhow!("buffer store is not a remote")); + }; - if let Err(error) = result { - self.loading_remote_buffers_by_id.remove(&buffer_id); - if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) { - for listener in listeners { - listener.send(Err(error.cloned())).ok(); - } - } - } else if chunk.is_last { - self.loading_remote_buffers_by_id.remove(&buffer_id); - // retain buffers sent by peers to avoid races. - match &mut self.state { - BufferStoreState::Remote { - ref mut shared_with_me, - upstream_client, - .. - } => { - if upstream_client.is_via_collab() { - shared_with_me.insert(buffer.clone()); - } - } - _ => {} - } - self.add_buffer(buffer, cx)?; - } - } + if let Some(buffer) = remote.update(cx, |remote, cx| { + remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx) + })? 
{ + self.add_buffer(buffer, cx)?; } Ok(()) @@ -1288,7 +1562,7 @@ impl BufferStore { this.update(&mut cx, |this, cx| { let payload = envelope.payload.clone(); - if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?; let worktree = this .worktree_store @@ -1313,6 +1587,15 @@ impl BufferStore { cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file }); } } + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + downstream_client + .send(proto::UpdateBufferFile { + project_id: *project_id, + buffer_id: buffer_id.into(), + file: envelope.payload.file, + }) + .log_err(); + } Ok(()) })? } @@ -1325,11 +1608,20 @@ impl BufferStore { this.update(&mut cx, |this, cx| { let buffer_id = envelope.payload.buffer_id; let buffer_id = BufferId::new(buffer_id)?; - if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { buffer.update(cx, |buffer, cx| { - buffer.set_diff_base(envelope.payload.diff_base, cx) + buffer.set_diff_base(envelope.payload.diff_base.clone(), cx) }); } + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + downstream_client + .send(proto::UpdateDiffBase { + project_id: *project_id, + buffer_id: buffer_id.into(), + diff_base: envelope.payload.diff_base, + }) + .log_err(); + } Ok(()) })? 
} @@ -1408,13 +1700,24 @@ impl BufferStore { ) -> Result<()> { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let version = deserialize_version(&envelope.payload.version); - let mtime = envelope.payload.mtime.map(|time| time.into()); - this.update(&mut cx, |this, cx| { - if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + let mtime = envelope.payload.mtime.clone().map(|time| time.into()); + this.update(&mut cx, move |this, cx| { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { buffer.update(cx, |buffer, cx| { buffer.did_save(version, mtime, cx); }); } + + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + downstream_client + .send(proto::BufferSaved { + project_id: *project_id, + buffer_id: buffer_id.into(), + mtime: envelope.payload.mtime, + version: envelope.payload.version, + }) + .log_err(); + } }) } @@ -1425,17 +1728,29 @@ impl BufferStore { ) -> Result<()> { let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let version = deserialize_version(&envelope.payload.version); - let mtime = envelope.payload.mtime.map(|time| time.into()); + let mtime = envelope.payload.mtime.clone().map(|time| time.into()); let line_ending = deserialize_line_ending( proto::LineEnding::from_i32(envelope.payload.line_ending) .ok_or_else(|| anyhow!("missing line ending"))?, ); this.update(&mut cx, |this, cx| { - if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id, cx) { buffer.update(cx, |buffer, cx| { buffer.did_reload(version, line_ending, mtime, cx); }); } + + if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() { + downstream_client + .send(proto::BufferReloaded { + project_id: *project_id, + buffer_id: buffer_id.into(), + mtime: envelope.payload.mtime, + version: envelope.payload.version, + line_ending: envelope.payload.line_ending, + }) + .log_err(); + } }) } @@ -1480,66 +1795,14 @@ impl 
BufferStore { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - let mut local_buffers = Vec::new(); - let mut remote_buffers = Vec::new(); - for buffer_handle in buffers { - let buffer = buffer_handle.read(cx); - if buffer.is_dirty() { - if let Some(file) = File::from_dyn(buffer.file()) { - if file.is_local() { - local_buffers.push(buffer_handle); - } else { - remote_buffers.push(buffer_handle); - } - } - } + let buffers: Vec> = buffers + .into_iter() + .filter(|buffer| buffer.read(cx).is_dirty()) + .collect(); + if buffers.is_empty() { + return Task::ready(Ok(ProjectTransaction::default())); } - - let client = self.upstream_client(); - - cx.spawn(move |this, mut cx| async move { - let mut project_transaction = ProjectTransaction::default(); - if let Some((client, project_id)) = client { - let response = client - .request(proto::ReloadBuffers { - project_id, - buffer_ids: remote_buffers - .iter() - .filter_map(|buffer| { - buffer - .update(&mut cx, |buffer, _| buffer.remote_id().into()) - .ok() - }) - .collect(), - }) - .await? - .transaction - .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this, - response, - push_to_history, - cx.clone(), - ) - .await?; - } - - for buffer in local_buffers { - let transaction = buffer - .update(&mut cx, |buffer, cx| buffer.reload(cx))? - .await?; - buffer.update(&mut cx, |buffer, cx| { - if let Some(transaction) = transaction { - if !push_to_history { - buffer.forget_transaction(transaction.id); - } - project_transaction.0.insert(cx.handle(), transaction); - } - })?; - } - - Ok(project_transaction) - }) + self.state.reload_buffers(buffers, push_to_history, cx) } async fn handle_reload_buffers( @@ -1629,17 +1892,6 @@ impl BufferStore { }) } - pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { - match &self.state { - BufferStoreState::Remote { - upstream_client, - project_id, - .. 
- } => Some((upstream_client.clone(), *project_id)), - BufferStoreState::Local { .. } => None, - } - } - pub fn forget_shared_buffers(&mut self) { self.shared_buffers.clear(); } @@ -1658,6 +1910,72 @@ impl BufferStore { &self.shared_buffers } + pub fn create_local_buffer( + &mut self, + text: &str, + language: Option>, + cx: &mut ModelContext, + ) -> Model { + let buffer = cx.new_model(|cx| { + Buffer::local(text, cx) + .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx) + }); + + self.add_buffer(buffer.clone(), cx).log_err(); + let buffer_id = buffer.read(cx).remote_id(); + + let local = self + .state + .as_local() + .expect("local-only method called in a non-local context"); + local.update(cx, |this, cx| { + if let Some(file) = File::from_dyn(buffer.read(cx).file()) { + this.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: file.worktree_id(cx), + path: file.path.clone(), + }, + buffer_id, + ); + + if let Some(entry_id) = file.entry_id { + this.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); + } + } + }); + buffer + } + + pub fn deserialize_project_transaction( + &mut self, + message: proto::ProjectTransaction, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + if let Some(remote) = self.state.as_remote() { + remote.update(cx, |remote, cx| { + remote.deserialize_project_transaction(message, push_to_history, cx) + }) + } else { + debug_panic!("not a remote buffer store"); + Task::ready(Err(anyhow!("not a remote buffer store"))) + } + } + + pub fn wait_for_remote_buffer( + &self, + id: BufferId, + cx: &mut AppContext, + ) -> Task>> { + if let Some(remote) = self.state.as_remote() { + remote.update(cx, |remote, cx| remote.wait_for_remote_buffer(id, cx)) + } else { + debug_panic!("not a remote buffer store"); + Task::ready(Err(anyhow!("not a remote buffer store"))) + } + } + pub fn serialize_project_transaction_for_peer( &mut self, project_transaction: ProjectTransaction, @@ -1680,41 +1998,6 @@ impl 
BufferStore { } serialized_transaction } - - pub async fn deserialize_project_transaction( - this: WeakModel, - message: proto::ProjectTransaction, - push_to_history: bool, - mut cx: AsyncAppContext, - ) -> Result { - let mut project_transaction = ProjectTransaction::default(); - for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions) { - let buffer_id = BufferId::new(buffer_id)?; - let buffer = this - .update(&mut cx, |this, cx| { - this.wait_for_remote_buffer(buffer_id, cx) - })? - .await?; - let transaction = language::proto::deserialize_transaction(transaction)?; - project_transaction.0.insert(buffer, transaction); - } - - for (buffer, transaction) in &project_transaction.0 { - buffer - .update(&mut cx, |buffer, _| { - buffer.wait_for_edits(transaction.edit_ids.iter().copied()) - })? - .await?; - - if push_to_history { - buffer.update(&mut cx, |buffer, _| { - buffer.push_transaction(transaction.clone(), Instant::now()); - })?; - } - } - - Ok(project_transaction) - } } impl OpenBuffer { diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index 2b7b10d9b369a9..96eb327e8c4344 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -1,10 +1,9 @@ mod signature_help; use crate::{ - buffer_store::BufferStore, lsp_store::LspStore, CodeAction, CoreCompletion, DocumentHighlight, - Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart, - InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink, MarkupContent, - ProjectTransaction, ResolveState, + lsp_store::LspStore, CodeAction, CoreCompletion, DocumentHighlight, Hover, HoverBlock, + HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintLabelPartTooltip, + InlayHintTooltip, Location, LocationLink, MarkupContent, ProjectTransaction, ResolveState, }; use anyhow::{anyhow, Context, Result}; use async_trait::async_trait; @@ -417,18 +416,18 @@ impl LspCommand for PerformRename 
{ message: proto::PerformRenameResponse, lsp_store: Model, _: Model, - cx: AsyncAppContext, + mut cx: AsyncAppContext, ) -> Result { let message = message .transaction .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - lsp_store.read_with(&cx, |lsp_store, _| lsp_store.buffer_store().downgrade())?, - message, - self.push_to_history, - cx, - ) - .await + lsp_store + .update(&mut cx, |lsp_store, cx| { + lsp_store.buffer_store().update(cx, |buffer_store, cx| { + buffer_store.deserialize_project_transaction(message, self.push_to_history, cx) + }) + })? + .await } fn buffer_id_from_proto(message: &proto::PerformRename) -> Result { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 6c71d4baebf563..8d859c091bfe93 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1601,19 +1601,19 @@ impl LspStore { buffer_id: buffer_handle.read(cx).remote_id().into(), action: Some(Self::serialize_code_action(&action)), }; - cx.spawn(move |this, cx| async move { + let buffer_store = self.buffer_store(); + cx.spawn(move |_, mut cx| async move { let response = upstream_client .request(request) .await? .transaction .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, - response, - push_to_history, - cx, - ) - .await + + buffer_store + .update(&mut cx, |buffer_store, cx| { + buffer_store.deserialize_project_transaction(response, push_to_history, cx) + })? 
+ .await }) } else { let buffer = buffer_handle.read(cx); @@ -5062,6 +5062,7 @@ impl LspStore { .spawn(this.languages.language_for_name(language_name.0.as_ref())) .detach(); + // host let adapter = this.languages.get_or_register_lsp_adapter( language_name.clone(), server_name.clone(), @@ -5259,7 +5260,8 @@ impl LspStore { result }) } else if let Some((client, project_id)) = self.upstream_client() { - cx.spawn(move |this, mut cx| async move { + let buffer_store = self.buffer_store(); + cx.spawn(move |_, mut cx| async move { let response = client .request(proto::FormatBuffers { project_id, @@ -5274,13 +5276,12 @@ impl LspStore { .await? .transaction .ok_or_else(|| anyhow!("missing transaction"))?; - BufferStore::deserialize_project_transaction( - this.read_with(&cx, |this, _| this.buffer_store.downgrade())?, - response, - push_to_history, - cx, - ) - .await + + buffer_store + .update(&mut cx, |buffer_store, cx| { + buffer_store.deserialize_project_transaction(response, push_to_history, cx) + })? 
+ .await }) } else { Task::ready(Ok(ProjectTransaction::default())) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 454a7586c8856c..fe4d2d6b01545e 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1667,16 +1667,8 @@ impl Project { } pub fn create_buffer(&mut self, cx: &mut ModelContext) -> Task>> { - self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.create_buffer( - if self.is_via_collab() { - Some((self.client.clone().into(), self.remote_id().unwrap())) - } else { - None - }, - cx, - ) - }) + self.buffer_store + .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx)) } pub fn create_local_buffer( @@ -1685,7 +1677,7 @@ impl Project { language: Option>, cx: &mut ModelContext, ) -> Model { - if self.is_via_collab() { + if self.is_via_collab() || self.is_via_ssh() { panic!("called create_local_buffer on a remote project") } self.buffer_store.update(cx, |buffer_store, cx| { @@ -3770,7 +3762,9 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let buffer = this.update(&mut cx, |this, cx| this.create_local_buffer("", None, cx))?; + let buffer = this + .update(&mut cx, |this, cx| this.create_buffer(cx))? 
+ .await?; let peer_id = envelope.original_sender_id()?; Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx) diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index eca65f1349845c..084fcf9929f014 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -56,6 +56,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test }) .await .unwrap(); + buffer.update(cx, |buffer, cx| { assert_eq!(buffer.text(), "fn one() -> usize { 1 }"); assert_eq!( From fdb03d30587d3269fbb76a9f44a6f08a7f51df97 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 24 Sep 2024 15:16:27 -0700 Subject: [PATCH 311/762] Move DisplayDiffHunk into hunk_diff module (#18307) Release Notes: - N/A Co-authored-by: Marshall --- crates/editor/src/editor.rs | 3 +- crates/editor/src/element.rs | 7 +- crates/editor/src/git.rs | 308 ----------------------------- crates/editor/src/hunk_diff.rs | 340 ++++++++++++++++++++++++++++++--- 4 files changed, 320 insertions(+), 338 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index a32910e78ab973..316d945ca4df63 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -71,7 +71,6 @@ pub use element::{ use futures::{future, FutureExt}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::blame::GitBlame; -use git::diff_hunk_to_display; use gpui::{ div, impl_actions, point, prelude::*, px, relative, size, uniform_list, Action, AnyElement, AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds, ClipboardEntry, @@ -84,8 +83,8 @@ use gpui::{ }; use highlight_matching_bracket::refresh_matching_bracket_highlights; use hover_popover::{hide_hover, HoverState}; -use hunk_diff::ExpandedHunks; pub(crate) use hunk_diff::HoveredHunk; +use hunk_diff::{diff_hunk_to_display, ExpandedHunks}; use 
indent_guides::ActiveIndentGuidesState; use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy}; pub use inline_completion_provider::*; diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 31e4efb83b60e7..cf8edb67dccbc6 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -7,14 +7,11 @@ use crate::{ CurrentLineHighlight, DoubleClickInMultibuffer, MultiCursorModifier, ScrollBeyondLastLine, ShowScrollbar, }, - git::{ - blame::{CommitDetails, GitBlame}, - diff_hunk_to_display, DisplayDiffHunk, - }, + git::blame::{CommitDetails, GitBlame}, hover_popover::{ self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT, }, - hunk_diff::ExpandedHunk, + hunk_diff::{diff_hunk_to_display, DisplayDiffHunk, ExpandedHunk}, hunk_status, items::BufferSearchHighlights, mouse_context_menu::{self, MenuPosition, MouseContextMenu}, diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index fb18ca45a2a2ff..080babe4c682a5 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -1,309 +1 @@ pub mod blame; - -use std::ops::Range; - -use git::diff::DiffHunkStatus; -use language::Point; -use multi_buffer::{Anchor, MultiBufferDiffHunk}; - -use crate::{ - display_map::{DisplaySnapshot, ToDisplayPoint}, - hunk_status, AnchorRangeExt, DisplayRow, -}; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum DisplayDiffHunk { - Folded { - display_row: DisplayRow, - }, - - Unfolded { - diff_base_byte_range: Range, - display_row_range: Range, - multi_buffer_range: Range, - status: DiffHunkStatus, - }, -} - -impl DisplayDiffHunk { - pub fn start_display_row(&self) -> DisplayRow { - match self { - &DisplayDiffHunk::Folded { display_row } => display_row, - DisplayDiffHunk::Unfolded { - display_row_range, .. 
- } => display_row_range.start, - } - } - - pub fn contains_display_row(&self, display_row: DisplayRow) -> bool { - let range = match self { - &DisplayDiffHunk::Folded { display_row } => display_row..=display_row, - - DisplayDiffHunk::Unfolded { - display_row_range, .. - } => display_row_range.start..=display_row_range.end, - }; - - range.contains(&display_row) - } -} - -pub fn diff_hunk_to_display( - hunk: &MultiBufferDiffHunk, - snapshot: &DisplaySnapshot, -) -> DisplayDiffHunk { - let hunk_start_point = Point::new(hunk.row_range.start.0, 0); - let hunk_start_point_sub = Point::new(hunk.row_range.start.0.saturating_sub(1), 0); - let hunk_end_point_sub = Point::new( - hunk.row_range - .end - .0 - .saturating_sub(1) - .max(hunk.row_range.start.0), - 0, - ); - - let status = hunk_status(hunk); - let is_removal = status == DiffHunkStatus::Removed; - - let folds_start = Point::new(hunk.row_range.start.0.saturating_sub(2), 0); - let folds_end = Point::new(hunk.row_range.end.0 + 2, 0); - let folds_range = folds_start..folds_end; - - let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| { - let fold_point_range = fold.range.to_point(&snapshot.buffer_snapshot); - let fold_point_range = fold_point_range.start..=fold_point_range.end; - - let folded_start = fold_point_range.contains(&hunk_start_point); - let folded_end = fold_point_range.contains(&hunk_end_point_sub); - let folded_start_sub = fold_point_range.contains(&hunk_start_point_sub); - - (folded_start && folded_end) || (is_removal && folded_start_sub) - }); - - if let Some(fold) = containing_fold { - let row = fold.range.start.to_display_point(snapshot).row(); - DisplayDiffHunk::Folded { display_row: row } - } else { - let start = hunk_start_point.to_display_point(snapshot).row(); - - let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start); - let hunk_end_point = Point::new(hunk_end_row.0, 0); - - let multi_buffer_start = snapshot.buffer_snapshot.anchor_before(hunk_start_point); - let 
multi_buffer_end = snapshot.buffer_snapshot.anchor_after(hunk_end_point); - let end = hunk_end_point.to_display_point(snapshot).row(); - - DisplayDiffHunk::Unfolded { - display_row_range: start..end, - multi_buffer_range: multi_buffer_start..multi_buffer_end, - status, - diff_base_byte_range: hunk.diff_base_byte_range.clone(), - } - } -} - -#[cfg(test)] -mod tests { - use crate::Point; - use crate::{editor_tests::init_test, hunk_status}; - use gpui::{Context, TestAppContext}; - use language::Capability::ReadWrite; - use multi_buffer::{ExcerptRange, MultiBuffer, MultiBufferRow}; - use project::{FakeFs, Project}; - use unindent::Unindent; - #[gpui::test] - async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { - use git::diff::DiffHunkStatus; - init_test(cx, |_| {}); - - let fs = FakeFs::new(cx.background_executor.clone()); - let project = Project::test(fs, [], cx).await; - - // buffer has two modified hunks with two rows each - let buffer_1 = project.update(cx, |project, cx| { - project.create_local_buffer( - " - 1.zero - 1.ONE - 1.TWO - 1.three - 1.FOUR - 1.FIVE - 1.six - " - .unindent() - .as_str(), - None, - cx, - ) - }); - buffer_1.update(cx, |buffer, cx| { - buffer.set_diff_base( - Some( - " - 1.zero - 1.one - 1.two - 1.three - 1.four - 1.five - 1.six - " - .unindent(), - ), - cx, - ); - }); - - // buffer has a deletion hunk and an insertion hunk - let buffer_2 = project.update(cx, |project, cx| { - project.create_local_buffer( - " - 2.zero - 2.one - 2.two - 2.three - 2.four - 2.five - 2.six - " - .unindent() - .as_str(), - None, - cx, - ) - }); - buffer_2.update(cx, |buffer, cx| { - buffer.set_diff_base( - Some( - " - 2.zero - 2.one - 2.one-and-a-half - 2.two - 2.three - 2.four - 2.six - " - .unindent(), - ), - cx, - ); - }); - - cx.background_executor.run_until_parked(); - - let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.push_excerpts( - buffer_1.clone(), - [ - // excerpt ends in the middle of a 
modified hunk - ExcerptRange { - context: Point::new(0, 0)..Point::new(1, 5), - primary: Default::default(), - }, - // excerpt begins in the middle of a modified hunk - ExcerptRange { - context: Point::new(5, 0)..Point::new(6, 5), - primary: Default::default(), - }, - ], - cx, - ); - multibuffer.push_excerpts( - buffer_2.clone(), - [ - // excerpt ends at a deletion - ExcerptRange { - context: Point::new(0, 0)..Point::new(1, 5), - primary: Default::default(), - }, - // excerpt starts at a deletion - ExcerptRange { - context: Point::new(2, 0)..Point::new(2, 5), - primary: Default::default(), - }, - // excerpt fully contains a deletion hunk - ExcerptRange { - context: Point::new(1, 0)..Point::new(2, 5), - primary: Default::default(), - }, - // excerpt fully contains an insertion hunk - ExcerptRange { - context: Point::new(4, 0)..Point::new(6, 5), - primary: Default::default(), - }, - ], - cx, - ); - multibuffer - }); - - let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx)); - - assert_eq!( - snapshot.text(), - " - 1.zero - 1.ONE - 1.FIVE - 1.six - 2.zero - 2.one - 2.two - 2.one - 2.two - 2.four - 2.five - 2.six" - .unindent() - ); - - let expected = [ - ( - DiffHunkStatus::Modified, - MultiBufferRow(1)..MultiBufferRow(2), - ), - ( - DiffHunkStatus::Modified, - MultiBufferRow(2)..MultiBufferRow(3), - ), - //TODO: Define better when and where removed hunks show up at range extremities - ( - DiffHunkStatus::Removed, - MultiBufferRow(6)..MultiBufferRow(6), - ), - ( - DiffHunkStatus::Removed, - MultiBufferRow(8)..MultiBufferRow(8), - ), - ( - DiffHunkStatus::Added, - MultiBufferRow(10)..MultiBufferRow(11), - ), - ]; - - assert_eq!( - snapshot - .git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.row_range)) - .collect::>(), - &expected, - ); - - assert_eq!( - snapshot - .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.row_range)) - .collect::>(), - 
expected - .iter() - .rev() - .cloned() - .collect::>() - .as_slice(), - ); - } -} diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 2f7bb49e853491..67e8a25df58c56 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -1,18 +1,16 @@ -use std::{ - ops::{Range, RangeInclusive}, - sync::Arc, -}; - use collections::{hash_map, HashMap, HashSet}; use git::diff::DiffHunkStatus; use gpui::{Action, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task, View}; -use language::Buffer; +use language::{Buffer, BufferId, Point}; use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, MultiBufferSnapshot, ToPoint, }; use settings::SettingsStore; -use text::{BufferId, Point}; +use std::{ + ops::{Range, RangeInclusive}, + sync::Arc, +}; use ui::{ prelude::*, ActiveTheme, ContextMenu, InteractiveElement, IntoElement, ParentElement, Pixels, Styled, ViewContext, VisualContext, @@ -20,13 +18,11 @@ use ui::{ use util::{debug_panic, RangeExt}; use crate::{ - editor_settings::CurrentLineHighlight, - git::{diff_hunk_to_display, DisplayDiffHunk}, - hunk_status, hunks_for_selections, - mouse_context_menu::MouseContextMenu, - BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, Editor, - EditorElement, EditorSnapshot, ExpandAllHunkDiffs, RangeToAnchorExt, RevertFile, - RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, + editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, + mouse_context_menu::MouseContextMenu, BlockDisposition, BlockProperties, BlockStyle, + CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, Editor, EditorElement, + EditorSnapshot, ExpandAllHunkDiffs, RangeToAnchorExt, RevertFile, RevertSelectedHunks, + ToDisplayPoint, ToggleHunkDiff, }; #[derive(Debug, Clone)] @@ -43,12 +39,35 @@ pub(super) struct ExpandedHunks { hunk_update_tasks: HashMap, Task<()>>, } +#[derive(Debug, Clone)] 
+pub(super) struct ExpandedHunk { + pub block: Option, + pub hunk_range: Range, + pub diff_base_byte_range: Range, + pub status: DiffHunkStatus, + pub folded: bool, +} + #[derive(Debug)] struct DiffBaseBuffer { buffer: Model, diff_base_version: usize, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum DisplayDiffHunk { + Folded { + display_row: DisplayRow, + }, + + Unfolded { + diff_base_byte_range: Range, + display_row_range: Range, + multi_buffer_range: Range, + status: DiffHunkStatus, + }, +} + impl ExpandedHunks { pub fn hunks(&self, include_folded: bool) -> impl Iterator { self.hunks @@ -57,15 +76,6 @@ impl ExpandedHunks { } } -#[derive(Debug, Clone)] -pub(super) struct ExpandedHunk { - pub block: Option, - pub hunk_range: Range, - pub diff_base_byte_range: Range, - pub status: DiffHunkStatus, - pub folded: bool, -} - impl Editor { pub(super) fn open_hunk_context_menu( &mut self, @@ -883,3 +893,287 @@ fn to_inclusive_row_range( let new_range = point_range.to_anchors(&snapshot.buffer_snapshot); new_range.start..=new_range.end } + +impl DisplayDiffHunk { + pub fn start_display_row(&self) -> DisplayRow { + match self { + &DisplayDiffHunk::Folded { display_row } => display_row, + DisplayDiffHunk::Unfolded { + display_row_range, .. + } => display_row_range.start, + } + } + + pub fn contains_display_row(&self, display_row: DisplayRow) -> bool { + let range = match self { + &DisplayDiffHunk::Folded { display_row } => display_row..=display_row, + + DisplayDiffHunk::Unfolded { + display_row_range, .. 
+ } => display_row_range.start..=display_row_range.end, + }; + + range.contains(&display_row) + } +} + +pub fn diff_hunk_to_display( + hunk: &MultiBufferDiffHunk, + snapshot: &DisplaySnapshot, +) -> DisplayDiffHunk { + let hunk_start_point = Point::new(hunk.row_range.start.0, 0); + let hunk_start_point_sub = Point::new(hunk.row_range.start.0.saturating_sub(1), 0); + let hunk_end_point_sub = Point::new( + hunk.row_range + .end + .0 + .saturating_sub(1) + .max(hunk.row_range.start.0), + 0, + ); + + let status = hunk_status(hunk); + let is_removal = status == DiffHunkStatus::Removed; + + let folds_start = Point::new(hunk.row_range.start.0.saturating_sub(2), 0); + let folds_end = Point::new(hunk.row_range.end.0 + 2, 0); + let folds_range = folds_start..folds_end; + + let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| { + let fold_point_range = fold.range.to_point(&snapshot.buffer_snapshot); + let fold_point_range = fold_point_range.start..=fold_point_range.end; + + let folded_start = fold_point_range.contains(&hunk_start_point); + let folded_end = fold_point_range.contains(&hunk_end_point_sub); + let folded_start_sub = fold_point_range.contains(&hunk_start_point_sub); + + (folded_start && folded_end) || (is_removal && folded_start_sub) + }); + + if let Some(fold) = containing_fold { + let row = fold.range.start.to_display_point(snapshot).row(); + DisplayDiffHunk::Folded { display_row: row } + } else { + let start = hunk_start_point.to_display_point(snapshot).row(); + + let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start); + let hunk_end_point = Point::new(hunk_end_row.0, 0); + + let multi_buffer_start = snapshot.buffer_snapshot.anchor_before(hunk_start_point); + let multi_buffer_end = snapshot.buffer_snapshot.anchor_after(hunk_end_point); + let end = hunk_end_point.to_display_point(snapshot).row(); + + DisplayDiffHunk::Unfolded { + display_row_range: start..end, + multi_buffer_range: multi_buffer_start..multi_buffer_end, + status, + 
diff_base_byte_range: hunk.diff_base_byte_range.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{editor_tests::init_test, hunk_status}; + use gpui::{Context, TestAppContext}; + use language::Capability::ReadWrite; + use multi_buffer::{ExcerptRange, MultiBuffer, MultiBufferRow}; + use project::{FakeFs, Project}; + use unindent::Unindent as _; + + #[gpui::test] + async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { + use git::diff::DiffHunkStatus; + init_test(cx, |_| {}); + + let fs = FakeFs::new(cx.background_executor.clone()); + let project = Project::test(fs, [], cx).await; + + // buffer has two modified hunks with two rows each + let buffer_1 = project.update(cx, |project, cx| { + project.create_local_buffer( + " + 1.zero + 1.ONE + 1.TWO + 1.three + 1.FOUR + 1.FIVE + 1.six + " + .unindent() + .as_str(), + None, + cx, + ) + }); + buffer_1.update(cx, |buffer, cx| { + buffer.set_diff_base( + Some( + " + 1.zero + 1.one + 1.two + 1.three + 1.four + 1.five + 1.six + " + .unindent(), + ), + cx, + ); + }); + + // buffer has a deletion hunk and an insertion hunk + let buffer_2 = project.update(cx, |project, cx| { + project.create_local_buffer( + " + 2.zero + 2.one + 2.two + 2.three + 2.four + 2.five + 2.six + " + .unindent() + .as_str(), + None, + cx, + ) + }); + buffer_2.update(cx, |buffer, cx| { + buffer.set_diff_base( + Some( + " + 2.zero + 2.one + 2.one-and-a-half + 2.two + 2.three + 2.four + 2.six + " + .unindent(), + ), + cx, + ); + }); + + cx.background_executor.run_until_parked(); + + let multibuffer = cx.new_model(|cx| { + let mut multibuffer = MultiBuffer::new(ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ + // excerpt ends in the middle of a modified hunk + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 5), + primary: Default::default(), + }, + // excerpt begins in the middle of a modified hunk + ExcerptRange { + context: Point::new(5, 0)..Point::new(6, 5), + primary: Default::default(), + }, + 
], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ + // excerpt ends at a deletion + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 5), + primary: Default::default(), + }, + // excerpt starts at a deletion + ExcerptRange { + context: Point::new(2, 0)..Point::new(2, 5), + primary: Default::default(), + }, + // excerpt fully contains a deletion hunk + ExcerptRange { + context: Point::new(1, 0)..Point::new(2, 5), + primary: Default::default(), + }, + // excerpt fully contains an insertion hunk + ExcerptRange { + context: Point::new(4, 0)..Point::new(6, 5), + primary: Default::default(), + }, + ], + cx, + ); + multibuffer + }); + + let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx)); + + assert_eq!( + snapshot.text(), + " + 1.zero + 1.ONE + 1.FIVE + 1.six + 2.zero + 2.one + 2.two + 2.one + 2.two + 2.four + 2.five + 2.six" + .unindent() + ); + + let expected = [ + ( + DiffHunkStatus::Modified, + MultiBufferRow(1)..MultiBufferRow(2), + ), + ( + DiffHunkStatus::Modified, + MultiBufferRow(2)..MultiBufferRow(3), + ), + //TODO: Define better when and where removed hunks show up at range extremities + ( + DiffHunkStatus::Removed, + MultiBufferRow(6)..MultiBufferRow(6), + ), + ( + DiffHunkStatus::Removed, + MultiBufferRow(8)..MultiBufferRow(8), + ), + ( + DiffHunkStatus::Added, + MultiBufferRow(10)..MultiBufferRow(11), + ), + ]; + + assert_eq!( + snapshot + .git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) + .collect::>(), + &expected, + ); + + assert_eq!( + snapshot + .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) + .collect::>(), + expected + .iter() + .rev() + .cloned() + .collect::>() + .as_slice(), + ); + } +} From d33600525ee372813293e803d6aa0f2fa7d50fcb Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Sep 2024 16:23:08 -0600 Subject: [PATCH 312/762] ssh remoting: Fix cmd-o (#18308) 
Release Notes: - ssh-remoting: Cmd-O now correctly opens files on the remote host --------- Co-authored-by: Mikayla --- crates/assistant/src/context_store.rs | 6 -- .../random_project_collaboration_tests.rs | 11 ++- crates/editor/src/editor.rs | 2 +- crates/feedback/src/feedback_modal.rs | 69 ++++++++----------- crates/file_finder/src/file_finder.rs | 2 +- crates/language_tools/src/lsp_log.rs | 4 +- crates/outline_panel/src/outline_panel.rs | 2 +- crates/project/src/project.rs | 46 +++++++------ crates/project_panel/src/project_panel.rs | 5 +- crates/tasks_ui/src/lib.rs | 2 +- crates/tasks_ui/src/modal.rs | 2 +- crates/terminal_view/src/terminal_panel.rs | 2 +- crates/title_bar/src/collab.rs | 8 +-- crates/workspace/src/workspace.rs | 12 ++-- crates/zed/src/zed.rs | 2 +- 15 files changed, 84 insertions(+), 91 deletions(-) diff --git a/crates/assistant/src/context_store.rs b/crates/assistant/src/context_store.rs index f57a2fbca613c3..f4f03dda377ba7 100644 --- a/crates/assistant/src/context_store.rs +++ b/crates/assistant/src/context_store.rs @@ -357,9 +357,6 @@ impl ContextStore { let Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow!("project was not remote"))); }; - if project.is_local_or_ssh() { - return Task::ready(Err(anyhow!("cannot create remote contexts as the host"))); - } let replica_id = project.replica_id(); let capability = project.capability(); @@ -488,9 +485,6 @@ impl ContextStore { let Some(project_id) = project.remote_id() else { return Task::ready(Err(anyhow!("project was not remote"))); }; - if project.is_local_or_ssh() { - return Task::ready(Err(anyhow!("cannot open remote contexts as the host"))); - } if let Some(context) = self.loaded_context_for_id(&context_id, cx) { return Task::ready(Ok(context)); diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 831114ba1a0c9c..19d37f8786be6f 100644 --- 
a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -298,8 +298,7 @@ impl RandomizedTest for ProjectCollaborationTest { continue; }; let project_root_name = root_name_for_project(&project, cx); - let is_local = - project.read_with(cx, |project, _| project.is_local_or_ssh()); + let is_local = project.read_with(cx, |project, _| project.is_local()); let worktree = project.read_with(cx, |project, cx| { project .worktrees(cx) @@ -335,7 +334,7 @@ impl RandomizedTest for ProjectCollaborationTest { continue; }; let project_root_name = root_name_for_project(&project, cx); - let is_local = project.read_with(cx, |project, _| project.is_local_or_ssh()); + let is_local = project.read_with(cx, |project, _| project.is_local()); match rng.gen_range(0..100_u32) { // Manipulate an existing buffer @@ -1256,7 +1255,7 @@ impl RandomizedTest for ProjectCollaborationTest { let buffers = client.buffers().clone(); for (guest_project, guest_buffers) in &buffers { let project_id = if guest_project.read_with(client_cx, |project, _| { - project.is_local_or_ssh() || project.is_disconnected() + project.is_local() || project.is_disconnected() }) { continue; } else { @@ -1560,9 +1559,7 @@ async fn ensure_project_shared( let first_root_name = root_name_for_project(project, cx); let active_call = cx.read(ActiveCall::global); if active_call.read_with(cx, |call, _| call.room().is_some()) - && project.read_with(cx, |project, _| { - project.is_local_or_ssh() && !project.is_shared() - }) + && project.read_with(cx, |project, _| project.is_local() && !project.is_shared()) { match active_call .update(cx, |call, cx| call.share_project(project.clone(), cx)) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 316d945ca4df63..b54889dc0d8c26 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11819,7 +11819,7 @@ impl Editor { .filter_map(|buffer| { let buffer = buffer.read(cx); 
let language = buffer.language()?; - if project.is_local_or_ssh() + if project.is_local() && project.language_servers_for_buffer(buffer, cx).count() == 0 { None diff --git a/crates/feedback/src/feedback_modal.rs b/crates/feedback/src/feedback_modal.rs index a4a07ad2ad44d0..4762b228d3e44f 100644 --- a/crates/feedback/src/feedback_modal.rs +++ b/crates/feedback/src/feedback_modal.rs @@ -18,8 +18,7 @@ use regex::Regex; use serde_derive::Serialize; use ui::{prelude::*, Button, ButtonStyle, IconPosition, Tooltip}; use util::ResultExt; -use workspace::notifications::NotificationId; -use workspace::{DismissDecision, ModalView, Toast, Workspace}; +use workspace::{DismissDecision, ModalView, Workspace}; use crate::{system_specs::SystemSpecs, GiveFeedback, OpenZedRepo}; @@ -120,44 +119,34 @@ impl FeedbackModal { pub fn register(workspace: &mut Workspace, cx: &mut ViewContext) { let _handle = cx.view().downgrade(); workspace.register_action(move |workspace, _: &GiveFeedback, cx| { - let markdown = workspace - .app_state() - .languages - .language_for_name("Markdown"); - - let project = workspace.project().clone(); - let is_local_project = project.read(cx).is_local_or_ssh(); - - if !is_local_project { - struct FeedbackInRemoteProject; - - workspace.show_toast( - Toast::new( - NotificationId::unique::(), - "You can only submit feedback in your own project.", - ), - cx, - ); - return; - } - - let system_specs = SystemSpecs::new(cx); - cx.spawn(|workspace, mut cx| async move { - let markdown = markdown.await.log_err(); - let buffer = project.update(&mut cx, |project, cx| { - project.create_local_buffer("", markdown, cx) - })?; - let system_specs = system_specs.await; - - workspace.update(&mut cx, |workspace, cx| { - workspace.toggle_modal(cx, move |cx| { - FeedbackModal::new(system_specs, project, buffer, cx) - }); - })?; - - anyhow::Ok(()) - }) - .detach_and_log_err(cx); + workspace + .with_local_workspace(cx, |workspace, cx| { + let markdown = workspace + .app_state() + 
.languages + .language_for_name("Markdown"); + + let project = workspace.project().clone(); + + let system_specs = SystemSpecs::new(cx); + cx.spawn(|workspace, mut cx| async move { + let markdown = markdown.await.log_err(); + let buffer = project.update(&mut cx, |project, cx| { + project.create_local_buffer("", markdown, cx) + })?; + let system_specs = system_specs.await; + + workspace.update(&mut cx, |workspace, cx| { + workspace.toggle_modal(cx, move |cx| { + FeedbackModal::new(system_specs, project, buffer, cx) + }); + })?; + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + }) + .detach_and_log_err(cx); }); } diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 4c3f92d3c156a0..726a8bcb5e6983 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -884,7 +884,7 @@ impl PickerDelegate for FileFinderDelegate { project .worktree_for_id(history_item.project.worktree_id, cx) .is_some() - || (project.is_local_or_ssh() && history_item.absolute.is_some()) + || (project.is_local() && history_item.absolute.is_some()) }), self.currently_opened_path.as_ref(), None, diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index bde5fe9b199e8d..d8fe3aa51840e4 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -184,7 +184,7 @@ pub fn init(cx: &mut AppContext) { cx.observe_new_views(move |workspace: &mut Workspace, cx| { let project = workspace.project(); - if project.read(cx).is_local_or_ssh() { + if project.read(cx).is_local() { log_store.update(cx, |store, cx| { store.add_project(project, cx); }); @@ -193,7 +193,7 @@ pub fn init(cx: &mut AppContext) { let log_store = log_store.clone(); workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, cx| { let project = workspace.project().read(cx); - if project.is_local_or_ssh() { + if project.is_local() { workspace.add_item_to_active_pane( 
Box::new(cx.new_view(|cx| { LspLogView::new(workspace.project().clone(), log_store.clone(), cx) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index da66ca40313d89..4944f770e73a6c 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -3909,7 +3909,7 @@ impl Render for OutlinePanel { .when(project.is_local(), |el| { el.on_action(cx.listener(Self::reveal_in_finder)) }) - .when(project.is_local_or_ssh(), |el| { + .when(project.is_local() || project.is_via_ssh(), |el| { el.on_action(cx.listener(Self::open_in_terminal)) }) .on_mouse_down( diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index fe4d2d6b01545e..5a9b235d91cdbf 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -487,7 +487,7 @@ impl DirectoryLister { pub fn is_local(&self, cx: &AppContext) -> bool { match self { DirectoryLister::Local(_) => true, - DirectoryLister::Project(project) => project.read(cx).is_local_or_ssh(), + DirectoryLister::Project(project) => project.read(cx).is_local(), } } @@ -1199,7 +1199,13 @@ impl Project { self.dev_server_project_id } - pub fn supports_remote_terminal(&self, cx: &AppContext) -> bool { + pub fn supports_terminal(&self, cx: &AppContext) -> bool { + if self.is_local() { + return true; + } + if self.is_via_ssh() { + return true; + } let Some(id) = self.dev_server_project_id else { return false; }; @@ -1213,10 +1219,6 @@ impl Project { } pub fn ssh_connection_string(&self, cx: &ModelContext) -> Option { - if self.is_local_or_ssh() { - return None; - } - let dev_server_id = self.dev_server_project_id()?; dev_server_projects::Store::global(cx) .read(cx) @@ -1643,13 +1645,6 @@ impl Project { } } - pub fn is_local_or_ssh(&self) -> bool { - match &self.client_state { - ProjectClientState::Local | ProjectClientState::Shared { .. } => true, - ProjectClientState::Remote { .. 
} => false, - } - } - pub fn is_via_ssh(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. } => { @@ -1735,7 +1730,7 @@ impl Project { ) -> Task>> { if let Some(buffer) = self.buffer_for_id(id, cx) { Task::ready(Ok(buffer)) - } else if self.is_local_or_ssh() { + } else if self.is_local() || self.is_via_ssh() { Task::ready(Err(anyhow!("buffer {} does not exist", id))) } else if let Some(project_id) = self.remote_id() { let request = self.client.request(proto::OpenBufferById { @@ -1857,7 +1852,7 @@ impl Project { let mut changes = rx.ready_chunks(MAX_BATCH_SIZE); while let Some(changes) = changes.next().await { - let is_local = this.update(&mut cx, |this, _| this.is_local_or_ssh())?; + let is_local = this.update(&mut cx, |this, _| this.is_local())?; for change in changes { match change { @@ -2001,7 +1996,7 @@ impl Project { language_server_id, message, } => { - if self.is_local_or_ssh() { + if self.is_local() { self.enqueue_buffer_ordered_message( BufferOrderedMessage::LanguageServerUpdate { language_server_id: *language_server_id, @@ -3039,8 +3034,19 @@ impl Project { query: String, cx: &mut ModelContext, ) -> Task>> { - if self.is_local_or_ssh() { + if self.is_local() { DirectoryLister::Local(self.fs.clone()).list_directory(query, cx) + } else if let Some(session) = self.ssh_session.as_ref() { + let request = proto::ListRemoteDirectory { + dev_server_id: SSH_PROJECT_ID, + path: query, + }; + + let response = session.request(request); + cx.background_executor().spawn(async move { + let response = response.await?; + Ok(response.entries.into_iter().map(PathBuf::from).collect()) + }) } else if let Some(dev_server) = self.dev_server_project_id().and_then(|id| { dev_server_projects::Store::global(cx) .read(cx) @@ -3317,7 +3323,7 @@ impl Project { mut cx: AsyncAppContext, ) -> Result<()> { this.update(&mut cx, |this, cx| { - if this.is_local_or_ssh() { + if this.is_local() || this.is_via_ssh() { this.unshare(cx)?; } 
else { this.disconnected_from_host(cx); @@ -3995,7 +4001,7 @@ impl Project { location: Location, cx: &mut ModelContext<'_, Project>, ) -> Task> { - if self.is_local_or_ssh() { + if self.is_local() { let (worktree_id, worktree_abs_path) = if let Some(worktree) = self.task_worktree(cx) { ( Some(worktree.read(cx).id()), @@ -4081,7 +4087,7 @@ impl Project { location: Option, cx: &mut ModelContext, ) -> Task>> { - if self.is_local_or_ssh() { + if self.is_local() { let (file, language) = location .map(|location| { let buffer = location.buffer.read(cx); diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 8e741134f0e446..6958bfb3318e23 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2722,11 +2722,14 @@ impl Render for ProjectPanel { } })) }) - .when(project.is_local_or_ssh(), |el| { + .when(project.is_local(), |el| { el.on_action(cx.listener(Self::reveal_in_finder)) .on_action(cx.listener(Self::open_system)) .on_action(cx.listener(Self::open_in_terminal)) }) + .when(project.is_via_ssh(), |el| { + el.on_action(cx.listener(Self::open_in_terminal)) + }) .on_mouse_down( MouseButton::Right, cx.listener(move |this, event: &MouseDownEvent, cx| { diff --git a/crates/tasks_ui/src/lib.rs b/crates/tasks_ui/src/lib.rs index 4ea4a8fa2ccdc0..fd14f9aaef2a40 100644 --- a/crates/tasks_ui/src/lib.rs +++ b/crates/tasks_ui/src/lib.rs @@ -94,7 +94,7 @@ fn toggle_modal(workspace: &mut Workspace, cx: &mut ViewContext<'_, Workspace>) workspace .update(&mut cx, |workspace, cx| { if workspace.project().update(cx, |project, cx| { - project.is_local_or_ssh() || project.ssh_connection_string(cx).is_some() + project.is_local() || project.ssh_connection_string(cx).is_some() }) { workspace.toggle_modal(cx, |cx| { TasksModal::new(project, task_context, workspace_handle, cx) diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 931a0b09c365fb..662e3f11fd05c4 100644 --- 
a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -225,7 +225,7 @@ impl PickerDelegate for TasksModalDelegate { if project.is_via_collab() && ssh_connection_string.is_none() { Task::ready((Vec::new(), Vec::new())) } else { - let remote_templates = if project.is_local_or_ssh() { + let remote_templates = if project.is_local() { None } else { project diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index f745fbe348ba40..72f8606fa21765 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -144,7 +144,7 @@ impl TerminalPanel { cx.subscribe(&pane, Self::handle_pane_event), ]; let project = workspace.project().read(cx); - let enabled = project.is_local_or_ssh() || project.supports_remote_terminal(cx); + let enabled = project.supports_terminal(cx); let this = Self { pane, fs: workspace.app_state().fs.clone(), diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 1f052e1a5e61ac..e9f89643d5729f 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -284,14 +284,14 @@ impl TitleBar { let room = room.read(cx); let project = self.project.read(cx); - let is_local = project.is_local_or_ssh(); let is_dev_server_project = project.dev_server_project_id().is_some(); - let is_shared = (is_local || is_dev_server_project) && project.is_shared(); + let is_shared = project.is_shared(); let is_muted = room.is_muted(); let is_deafened = room.is_deafened().unwrap_or(false); let is_screen_sharing = room.is_screen_sharing(); let can_use_microphone = room.can_use_microphone(); - let can_share_projects = room.can_share_projects(); + let can_share_projects = room.can_share_projects() + && (is_dev_server_project || project.is_local() || project.is_via_ssh()); let platform_supported = match self.platform_style { PlatformStyle::Mac => true, PlatformStyle::Linux | PlatformStyle::Windows => false, @@ -299,7 +299,7 @@ impl 
TitleBar { let mut children = Vec::new(); - if (is_local || is_dev_server_project) && can_share_projects { + if can_share_projects { children.push( Button::new( "toggle_sharing", diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 98f793c234aaeb..4290e12105a3c7 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1891,7 +1891,11 @@ impl Workspace { directories: true, multiple: true, }, - DirectoryLister::Local(self.app_state.fs.clone()), + if self.project.read(cx).is_via_ssh() { + DirectoryLister::Project(self.project.clone()) + } else { + DirectoryLister::Local(self.app_state.fs.clone()) + }, cx, ); @@ -3956,7 +3960,7 @@ impl Workspace { fn local_paths(&self, cx: &AppContext) -> Option>> { let project = self.project().read(cx); - if project.is_local_or_ssh() { + if project.is_local() { Some( project .visible_worktrees(cx) @@ -5160,7 +5164,7 @@ async fn join_channel_internal( return None; } - if (project.is_local_or_ssh() || is_dev_server) + if (project.is_local() || project.is_via_ssh() || is_dev_server) && project.visible_worktrees(cx).any(|tree| { tree.read(cx) .root_entry() @@ -5314,7 +5318,7 @@ pub fn local_workspace_windows(cx: &AppContext) -> Vec> .filter(|workspace| { workspace .read(cx) - .is_ok_and(|workspace| workspace.project.read(cx).is_local_or_ssh()) + .is_ok_and(|workspace| workspace.project.read(cx).is_local()) }) .collect() } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 8f4f1af24331c0..c631c01f99a1a6 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -230,7 +230,7 @@ pub fn initialize_workspace( let project = workspace.project().clone(); if project.update(cx, |project, cx| { - project.is_local_or_ssh() || project.ssh_connection_string(cx).is_some() + project.is_local() || project.is_via_ssh() || project.ssh_connection_string(cx).is_some() }) { project.update(cx, |project, cx| { let fs = app_state.fs.clone(); From 
9a8601227d99ad7a8b123a1470a89615919ca43e Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 24 Sep 2024 22:23:32 +0000 Subject: [PATCH 313/762] docs: Add example of TOML/taplo LSP settings (#18293) --- docs/src/languages/toml.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/src/languages/toml.md b/docs/src/languages/toml.md index a4aa8436ff0893..3f33925a281b69 100644 --- a/docs/src/languages/toml.md +++ b/docs/src/languages/toml.md @@ -18,3 +18,15 @@ include = ["Cargo.toml", "some_directory/**/*.toml"] align_entries = true reorder_keys = true ``` + +Alternatively, you can pass taplo configuration options via [Zed LSP Settings](../configuring-zed.md#lsp) + +```json + "lsp": { + "taplo": { + "settings": { + "array_auto_collapse": false + } + } + } +``` From e9bc9ed5d568997026f3ef0d015cc75880e1b5fd Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Wed, 25 Sep 2024 16:00:17 +0800 Subject: [PATCH 314/762] remote_server: Fix opening a new remote project not refreshing the project panel (#18262) Currently, when open new remote project, project_panel not refresh, we must `ctrl-p` and select an file to refresh the project_panel. After that, project_panel will refresh when remote project window active. Release Notes: - Fixed remote projects not restoring previous locations and not refreshing the project panel on open. 
--- crates/workspace/src/workspace.rs | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 4290e12105a3c7..c7ba4ae3faa894 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5607,6 +5607,9 @@ pub fn join_dev_server_project( }) }); + let serialized_workspace: Option = + persistence::DB.workspace_for_dev_server_project(dev_server_project_id); + let workspace = if let Some(existing_workspace) = existing_workspace { existing_workspace } else { @@ -5620,10 +5623,7 @@ pub fn join_dev_server_project( ) .await?; - let serialized_workspace: Option = - persistence::DB.workspace_for_dev_server_project(dev_server_project_id); - - let workspace_id = if let Some(serialized_workspace) = serialized_workspace { + let workspace_id = if let Some(ref serialized_workspace) = serialized_workspace { serialized_workspace.id } else { persistence::DB.next_id().await? @@ -5650,10 +5650,13 @@ pub fn join_dev_server_project( } }; - workspace.update(&mut cx, |_, cx| { - cx.activate(true); - cx.activate_window(); - })?; + workspace + .update(&mut cx, |_, cx| { + cx.activate(true); + cx.activate_window(); + open_items(serialized_workspace, vec![], app_state, cx) + })? 
+ .await?; anyhow::Ok(workspace) }) From fc9db97ac73e56288077b486e1d3d3618d5ee80e Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 10:02:35 +0200 Subject: [PATCH 315/762] client: Remove unused `fs` dependency (#18324) CI bot notified me about that in https://github.com/zed-industries/zed/pull/18323 Release Notes: - N/A --- Cargo.lock | 1 - crates/client/Cargo.toml | 1 - 2 files changed, 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9c6d2fb7b9b842..f1bc684401cb99 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2409,7 +2409,6 @@ dependencies = [ "cocoa 0.26.0", "collections", "feature_flags", - "fs", "futures 0.3.30", "gpui", "http_client", diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 8ae4f15c9796b1..dd420bbbe63184 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -23,7 +23,6 @@ chrono = { workspace = true, features = ["serde"] } clock.workspace = true collections.workspace = true feature_flags.workspace = true -fs.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true From eb71d2f1a8a606ac467e56c8d22e2d46818f87cf Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 10:03:10 +0200 Subject: [PATCH 316/762] zig: Fix highlighting of keywords like `orelse`, `or`, `and` (#18323) This changes the Zig highlights.scm to tag all keywords with `@keyword` and not with `@keyword.`, so the highlighting works properly. 
Closes #9355 Release Notes: - N/A Demo: ![screenshot-2024-09-25-09 32 20@2x](https://github.com/user-attachments/assets/567b8817-a522-4741-af7f-dcb1a79ddd40) --- extensions/zig/languages/zig/highlights.scm | 78 +++++++-------------- 1 file changed, 24 insertions(+), 54 deletions(-) diff --git a/extensions/zig/languages/zig/highlights.scm b/extensions/zig/languages/zig/highlights.scm index 152a66b670b442..aea2d34addb822 100644 --- a/extensions/zig/languages/zig/highlights.scm +++ b/extensions/zig/languages/zig/highlights.scm @@ -103,6 +103,7 @@ field_constant: (IDENTIFIER) @constant (BlockLabel (IDENTIFIER) @tag) [ + "fn" "asm" "defer" "errdefer" @@ -112,84 +113,53 @@ field_constant: (IDENTIFIER) @constant "enum" "opaque" "error" -] @keyword - -[ + "try" + "catch" + "for" + "while" + "break" + "continue" + "const" + "var" + "volatile" + "allowzero" + "noalias" + "addrspace" + "align" + "callconv" + "linksection" + "comptime" + "export" + "extern" + "inline" + "noinline" + "packed" + "pub" + "threadlocal" "async" "await" "suspend" "nosuspend" "resume" -] @keyword.coroutine - -[ - "fn" -] @keyword - -[ "and" "or" "orelse" -] @operator - -[ "return" -] @keyword.return - -[ "if" "else" "switch" -] @keyword.control - -[ - "for" - "while" - "break" - "continue" ] @keyword [ "usingnamespace" ] @constant -[ - "try" - "catch" -] @keyword - [ "anytype" "anyframe" (BuildinTypeExpr) ] @type -[ - "const" - "var" - "volatile" - "allowzero" - "noalias" -] @keyword - -[ - "addrspace" - "align" - "callconv" - "linksection" -] @keyword.storage - -[ - "comptime" - "export" - "extern" - "inline" - "noinline" - "packed" - "pub" - "threadlocal" -] @keyword - [ "null" "unreachable" From 7bb510971a0cc59b89ed5ed51cbacbf29f365c06 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 10:26:00 +0200 Subject: [PATCH 317/762] file picker: Use muted color for file icons (#18325) I think they were too much in-your-face. Muted looks better. 
Before: ![image](https://github.com/user-attachments/assets/73d6171a-6276-4450-acfb-52cd44fdfe59) After: ![image](https://github.com/user-attachments/assets/1d5f4524-b0b9-4ba6-ab66-5eaf619e58f9) Release Notes: - N/A --- crates/file_finder/src/file_finder.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 726a8bcb5e6983..1a65bd352d61d7 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -1070,7 +1070,9 @@ impl PickerDelegate for FileFinderDelegate { self.labels_for_match(path_match, cx, ix); let file_icon = if settings.file_icons { - FileIcons::get_icon(Path::new(&file_name), cx).map(Icon::from_path) + FileIcons::get_icon(Path::new(&file_name), cx) + .map(Icon::from_path) + .map(|icon| icon.color(Color::Muted)) } else { None }; From 623a6eca75cb941ea7a368e133097605882efbb9 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 25 Sep 2024 11:34:27 +0200 Subject: [PATCH 318/762] git: Do not rescan .git on fsmonitor events (#18326) Fixes #16404 by ignoring events coming from .git/fsmonitor--daemon/cookies subdirectory. Closes #16404 Release Notes: - Improved performance in repositories using Git fsmonitor--daemon feature. 
--- crates/git/src/git.rs | 3 +++ crates/worktree/src/worktree.rs | 27 +++++++++++++++++++++++++-- 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index 20629899e8c0cf..fb204fba8266ab 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -18,6 +18,9 @@ pub mod repository; pub mod status; pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git")); +pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies")); +pub static FSMONITOR_DAEMON: LazyLock<&'static OsStr> = + LazyLock::new(|| OsStr::new("fsmonitor--daemon")); pub static GITIGNORE: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".gitignore")); #[derive(Clone, Copy, Eq, Hash, PartialEq)] diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index d8555b71a4f67c..550843e51e448c 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -22,7 +22,7 @@ use fuzzy::CharBag; use git::{ repository::{GitFileStatus, GitRepository, RepoPath}, status::GitStatus, - DOT_GIT, GITIGNORE, + COOKIES, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, }; use gpui::{ AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext, @@ -3707,9 +3707,32 @@ impl BackgroundScanner { let snapshot = &self.state.lock().snapshot; { let mut is_git_related = false; + + // We don't want to trigger .git rescan for events within .git/fsmonitor--daemon/cookies directory. 
+ #[derive(PartialEq)] + enum FsMonitorParseState { + Cookies, + FsMonitor + } + let mut fsmonitor_parse_state = None; if let Some(dot_git_dir) = abs_path .ancestors() - .find(|ancestor| ancestor.file_name() == Some(*DOT_GIT)) + .find(|ancestor| { + let file_name = ancestor.file_name(); + if file_name == Some(*COOKIES) { + fsmonitor_parse_state = Some(FsMonitorParseState::Cookies); + false + } else if fsmonitor_parse_state == Some(FsMonitorParseState::Cookies) && file_name == Some(*FSMONITOR_DAEMON) { + fsmonitor_parse_state = Some(FsMonitorParseState::FsMonitor); + false + } else if fsmonitor_parse_state != Some(FsMonitorParseState::FsMonitor) && file_name == Some(*DOT_GIT) { + true + } else { + fsmonitor_parse_state.take(); + false + } + + }) { let dot_git_path = dot_git_dir .strip_prefix(&root_canonical_path) From 9d197ddc99b3b6e4c85f481cc45b0d33c170a494 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 12:03:24 +0200 Subject: [PATCH 319/762] ssh remoting: Fix SSH connection not being closed (#18329) This fixes the `SshSession` being leaked. There were two leaks: 1. `Arc` itself got leaked into the `SettingsObserver` that lives as long as the application. Fixed with a weak reference. 2. The two tasks spawned by an `SshSession` had a circular dependency and didn't exit while the other one was running. Fixed by fixing (1) and then attaching one of the tasks to the `SshSession`, which means it gets dropped with the session itself, which leads the other task to error and exit. 
Co-authored-by: Bennet Release Notes: - N/A --------- Co-authored-by: Bennet --- crates/project/src/project_settings.rs | 13 +++++--- crates/recent_projects/src/recent_projects.rs | 2 +- crates/remote/src/ssh_session.rs | 31 ++++++++++++++----- crates/rpc/src/proto_client.rs | 20 +++++++++++- crates/worktree/src/worktree.rs | 2 +- 5 files changed, 53 insertions(+), 15 deletions(-) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index d6f5600a551ef4..68593f8fab0525 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -334,17 +334,20 @@ impl SettingsObserver { .log_err(); } + let weak_client = ssh.downgrade(); cx.observe_global::(move |_, cx| { let new_settings = cx.global::().raw_user_settings(); if &settings != new_settings { settings = new_settings.clone() } if let Some(content) = serde_json::to_string(&settings).log_err() { - ssh.send(proto::UpdateUserSettings { - project_id: 0, - content, - }) - .log_err(); + if let Some(ssh) = weak_client.upgrade() { + ssh.send(proto::UpdateUserSettings { + project_id: 0, + content, + }) + .log_err(); + } } }) .detach(); diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 20393d63e1a3df..570e9a565c28e6 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -509,7 +509,7 @@ impl PickerDelegate for RecentProjectsDelegate { .color(Color::Muted) .into_any_element() } - SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Screen) + SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Server) .color(Color::Muted) .into_any_element(), SerializedWorkspaceLocation::DevServer(_) => { diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 2bd18aa37e19d1..9d9d916f19b458 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -11,7 +11,7 @@ use futures::{ 
future::BoxFuture, select_biased, AsyncReadExt as _, AsyncWriteExt as _, Future, FutureExt as _, StreamExt as _, }; -use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion}; +use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion, Task}; use parking_lot::Mutex; use rpc::{ proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage}, @@ -51,6 +51,7 @@ pub struct SshSession { spawn_process_tx: mpsc::UnboundedSender, client_socket: Option, state: Mutex, // Lock + _io_task: Option>>, } struct SshClientState { @@ -173,8 +174,7 @@ impl SshSession { let mut child_stdout = remote_server_child.stdout.take().unwrap(); let mut child_stdin = remote_server_child.stdin.take().unwrap(); - let executor = cx.background_executor().clone(); - executor.clone().spawn(async move { + let io_task = cx.background_executor().spawn(async move { let mut stdin_buffer = Vec::new(); let mut stdout_buffer = Vec::new(); let mut stderr_buffer = Vec::new(); @@ -264,9 +264,18 @@ impl SshSession { } } } - }).detach(); + }); - cx.update(|cx| Self::new(incoming_rx, outgoing_tx, spawn_process_tx, Some(socket), cx)) + cx.update(|cx| { + Self::new( + incoming_rx, + outgoing_tx, + spawn_process_tx, + Some(socket), + Some(io_task), + cx, + ) + }) } pub fn server( @@ -275,7 +284,7 @@ impl SshSession { cx: &AppContext, ) -> Arc { let (tx, _rx) = mpsc::unbounded(); - Self::new(incoming_rx, outgoing_tx, tx, None, cx) + Self::new(incoming_rx, outgoing_tx, tx, None, None, cx) } #[cfg(any(test, feature = "test-support"))] @@ -293,6 +302,7 @@ impl SshSession { client_to_server_tx, tx.clone(), None, // todo() + None, cx, ) }), @@ -302,6 +312,7 @@ impl SshSession { server_to_client_tx, tx.clone(), None, + None, cx, ) }), @@ -313,6 +324,7 @@ impl SshSession { outgoing_tx: mpsc::UnboundedSender, spawn_process_tx: mpsc::UnboundedSender, client_socket: Option, + io_task: Option>>, cx: &AppContext, ) -> Arc { let this = Arc::new(Self { @@ -322,13 +334,18 @@ impl SshSession { 
spawn_process_tx, client_socket, state: Default::default(), + _io_task: io_task, }); cx.spawn(|cx| { - let this = this.clone(); + let this = Arc::downgrade(&this); async move { let peer_id = PeerId { owner_id: 0, id: 0 }; while let Some(incoming) = incoming_rx.next().await { + let Some(this) = this.upgrade() else { + return anyhow::Ok(()); + }; + if let Some(request_id) = incoming.responding_to { let request_id = MessageId(request_id); let sender = this.response_channels.lock().remove(&request_id); diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 88099102765ed7..56b13688bad2b6 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -10,11 +10,29 @@ use proto::{ error::ErrorExt as _, AnyTypedEnvelope, EntityMessage, Envelope, EnvelopedMessage, RequestMessage, TypedEnvelope, }; -use std::{any::TypeId, sync::Arc}; +use std::{ + any::TypeId, + sync::{Arc, Weak}, +}; #[derive(Clone)] pub struct AnyProtoClient(Arc); +impl AnyProtoClient { + pub fn downgrade(&self) -> AnyWeakProtoClient { + AnyWeakProtoClient(Arc::downgrade(&self.0)) + } +} + +#[derive(Clone)] +pub struct AnyWeakProtoClient(Weak); + +impl AnyWeakProtoClient { + pub fn upgrade(&self) -> Option { + self.0.upgrade().map(AnyProtoClient) + } +} + pub trait ProtoClient: Send + Sync { fn request( &self, diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 550843e51e448c..f91a832b80d783 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -472,7 +472,7 @@ impl Worktree { disconnected: false, }; - // Apply updates to a separate snapshto in a background task, then + // Apply updates to a separate snapshot in a background task, then // send them to a foreground task which updates the model. 
cx.background_executor() .spawn(async move { From a6cb17fb51bd3dca2a8c68bacfc8384234c10105 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 25 Sep 2024 12:27:57 +0200 Subject: [PATCH 320/762] chore: Fix violations of `elided_named_lifetimes` (#18330) I compile Zed from nightly build pretty often and I've noticed that we're getting a few hits on new rustc lint: https://github.com/rust-lang/rust/pull/129207 Release Notes: - N/A --- crates/editor/src/display_map/crease_map.rs | 2 +- crates/editor/src/editor.rs | 2 +- crates/language/src/syntax_map.rs | 6 +++--- crates/project/src/project.rs | 2 +- crates/sum_tree/src/sum_tree.rs | 4 ++-- crates/sum_tree/src/tree_map.rs | 2 +- crates/workspace/src/workspace.rs | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index bfc9c7d1a4ffbf..c3f2b0061ac732 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -69,7 +69,7 @@ impl CreaseSnapshot { &'a self, range: Range, snapshot: &'a MultiBufferSnapshot, - ) -> impl '_ + Iterator { + ) -> impl 'a + Iterator { let start = snapshot.anchor_before(Point::new(range.start.0, 0)); let mut cursor = self.creases.cursor::(snapshot); cursor.seek(&start, Bias::Left, snapshot); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b54889dc0d8c26..ad5cd24d73ac46 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11515,7 +11515,7 @@ impl Editor { &'a self, position: Anchor, buffer: &'a MultiBufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator> { let read_highlights = self .background_highlights .get(&TypeId::of::()) diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index daae54fb4da62a..55177f79620db7 100644 --- a/crates/language/src/syntax_map.rs +++ 
b/crates/language/src/syntax_map.rs @@ -794,7 +794,7 @@ impl SyntaxSnapshot { range: Range, buffer: &'a BufferSnapshot, query: fn(&Grammar) -> Option<&Query>, - ) -> SyntaxMapCaptures { + ) -> SyntaxMapCaptures<'a> { SyntaxMapCaptures::new( range.clone(), buffer.as_rope(), @@ -808,7 +808,7 @@ impl SyntaxSnapshot { range: Range, buffer: &'a BufferSnapshot, query: fn(&Grammar) -> Option<&Query>, - ) -> SyntaxMapMatches { + ) -> SyntaxMapMatches<'a> { SyntaxMapMatches::new( range.clone(), buffer.as_rope(), @@ -828,7 +828,7 @@ impl SyntaxSnapshot { range: Range, buffer: &'a BufferSnapshot, include_hidden: bool, - ) -> impl 'a + Iterator { + ) -> impl 'a + Iterator> { let start_offset = range.start.to_offset(buffer); let end_offset = range.end.to_offset(buffer); let start = buffer.anchor_before(start_offset); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 5a9b235d91cdbf..ee7f93a4f933c2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3954,7 +3954,7 @@ impl Project { pub fn supplementary_language_servers<'a>( &'a self, cx: &'a AppContext, - ) -> impl '_ + Iterator { + ) -> impl 'a + Iterator { self.lsp_store.read(cx).supplementary_language_servers() } diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 965413d3190aa2..7013dc66fd1ed7 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -345,7 +345,7 @@ impl SumTree { Iter::new(self) } - pub fn cursor<'a, S>(&'a self, cx: &::Context) -> Cursor + pub fn cursor<'a, S>(&'a self, cx: &::Context) -> Cursor<'a, T, S> where S: Dimension<'a, T::Summary>, { @@ -358,7 +358,7 @@ impl SumTree { &'a self, cx: &::Context, filter_node: F, - ) -> FilterCursor + ) -> FilterCursor<'a, F, T, U> where F: FnMut(&T::Summary) -> bool, U: Dimension<'a, T::Summary>, diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index b7eadb566d3ed7..c57226b681432a 100644 --- 
a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -105,7 +105,7 @@ impl TreeMap { cursor.item().map(|item| (&item.key, &item.value)) } - pub fn iter_from<'a>(&'a self, from: &'a K) -> impl Iterator + '_ { + pub fn iter_from<'a>(&'a self, from: &'a K) -> impl Iterator + 'a { let mut cursor = self.0.cursor::>(&()); let from_key = MapKeyRef(Some(from)); cursor.seek(&from_key, Bias::Left, &()); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index c7ba4ae3faa894..4d656294703d42 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -2119,7 +2119,7 @@ impl Workspace { pub fn items<'a>( &'a self, cx: &'a AppContext, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator> { self.panes.iter().flat_map(|pane| pane.read(cx).items()) } From 300bf87f77d3ae4eea93affc088b7f6b4979a277 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 12:45:53 +0200 Subject: [PATCH 321/762] ssh remoting: Kill SSH master process when dropping client (#18331) This was a process leak. Since we use `.spawn()`, the process continued to run in the background, even if our `SshClientState` was dropped. Means we need to manually clean it up. 
Release Notes: - N/A Co-authored-by: Bennet --- crates/remote/src/ssh_session.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 9d9d916f19b458..06a7f810e67210 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -56,7 +56,7 @@ pub struct SshSession { struct SshClientState { socket: SshSocket, - _master_process: process::Child, + master_process: process::Child, _temp_dir: TempDir, } @@ -593,7 +593,7 @@ impl SshClientState { connection_options, socket_path, }, - _master_process: master_process, + master_process, _temp_dir: temp_dir, }) } @@ -716,6 +716,14 @@ impl SshClientState { } } +impl Drop for SshClientState { + fn drop(&mut self) { + if let Err(error) = self.master_process.kill() { + log::error!("failed to kill SSH master process: {}", error); + } + } +} + impl SshSocket { fn ssh_command>(&self, program: S) -> process::Command { let mut command = process::Command::new("ssh"); From 4e2ae06ca6c467e5ff50600bb85cf7452d46ee92 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 12:59:22 +0200 Subject: [PATCH 322/762] recent project: Fix highlighting for matches in SSH projs (#18332) Release Notes: - N/A Co-authored-by: Bennet --- crates/recent_projects/src/recent_projects.rs | 26 +++++-------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 570e9a565c28e6..f73e7069d48a94 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -259,23 +259,12 @@ impl PickerDelegate for RecentProjectsDelegate { dev_server_project.paths.join("") ) } - SerializedWorkspaceLocation::Ssh(ssh_project) => { - format!( - "{}{}{}{}", - ssh_project.host, - ssh_project - .port - .as_ref() - .map(|port| port.to_string()) - .unwrap_or_default(), - 
ssh_project.paths.join(","), - ssh_project - .user - .as_ref() - .map(|user| user.to_string()) - .unwrap_or_default() - ) - } + SerializedWorkspaceLocation::Ssh(ssh_project) => ssh_project + .ssh_urls() + .iter() + .map(|path| path.to_string_lossy().to_string()) + .collect::>() + .join(""), }; StringMatchCandidate::new(id, combined_string) @@ -458,6 +447,7 @@ impl PickerDelegate for RecentProjectsDelegate { .order() .iter() .filter_map(|i| paths.paths().get(*i).cloned()) + .map(|path| path.compact()) .collect(), ), SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()), @@ -473,7 +463,6 @@ impl PickerDelegate for RecentProjectsDelegate { let (match_labels, paths): (Vec<_>, Vec<_>) = paths .iter() .map(|path| { - let path = path.compact(); let highlighted_text = highlights_for_path(path.as_ref(), &hit.positions, path_start_offset); @@ -704,7 +693,6 @@ fn highlights_for_path( }, ) } - impl RecentProjectsDelegate { fn delete_recent_project(&self, ix: usize, cx: &mut ViewContext>) { if let Some(selected_match) = self.matches.get(ix) { From ccc871c44c3085eef65bf4bcc3603b938691e557 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 13:41:18 +0200 Subject: [PATCH 323/762] ssh remoting: Expand tilde on host side (#18333) --- crates/project/src/worktree_store.rs | 5 +++-- crates/recent_projects/src/ssh_connections.rs | 9 ++++++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 9f25572fc7ec08..4b1764c3a87fa1 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -221,10 +221,11 @@ impl WorktreeStore { ) -> Task, Arc>> { let mut abs_path = abs_path.as_ref().to_string_lossy().to_string(); // If we start with `/~` that means the ssh path was something like `ssh://user@host/~/home-dir-folder/` - // in which case want to strip the leading the `/` and expand the tilde. 
+ // in which case want to strip the leading the `/`. + // On the host-side, the `~` will get expanded. // That's what git does too: https://github.com/libgit2/libgit2/issues/3345#issuecomment-127050850 if abs_path.starts_with("/~") { - abs_path = shellexpand::tilde(&abs_path[1..]).to_string(); + abs_path = abs_path[1..].to_string(); } let root_name = PathBuf::from(abs_path.clone()) .file_name() diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index ad23a5c8963b4e..1722c58f075399 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -327,7 +327,14 @@ impl SshClientDelegate { cx, ) .await - .map_err(|e| anyhow::anyhow!("failed to download remote server binary: {}", e))?; + .map_err(|e| { + anyhow::anyhow!( + "failed to download remote server binary (os: {}, arch: {}): {}", + platform.os, + platform.arch, + e + ) + })?; Ok((binary_path, version)) } From 59dc3985a1afa338720912734972f600178a8a85 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Sep 2024 11:41:35 +0000 Subject: [PATCH 324/762] Detect txt files as Plain Text (#18334) --- assets/settings/default.json | 1 + 1 file changed, 1 insertion(+) diff --git a/assets/settings/default.json b/assets/settings/default.json index 3e8d3c8c70dd7d..61239b002bedcb 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -762,6 +762,7 @@ // } // "file_types": { + "Plain Text": ["txt"], "JSON": ["flake.lock"], "JSONC": [ "**/.zed/**/*.json", From 500c3c54a64df3b119e1bd8b0a63822f45d2f4c9 Mon Sep 17 00:00:00 2001 From: Joseph T Lyons Date: Wed, 25 Sep 2024 11:02:40 -0400 Subject: [PATCH 325/762] v0.156.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f1bc684401cb99..41b2d6d452af53 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14388,7 +14388,7 @@ dependencies = [ [[package]] name = "zed" 
-version = "0.155.0" +version = "0.156.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 65724480f62334..eb8f45d92e476d 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.155.0" +version = "0.156.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From bbf5ed2ba158b5a3cf36d4cb83df4ec471728248 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 25 Sep 2024 09:42:07 -0600 Subject: [PATCH 326/762] Fix collab filtering panics better (#18344) Release Notes: - N/A --- crates/collab/src/api/events.rs | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 1632c2d798ef15..377741f434c2f8 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -364,21 +364,19 @@ pub async fn post_panic( } fn report_to_slack(panic: &Panic) -> bool { - if panic.os_name == "Linux" { - if panic.payload.contains("ERROR_SURFACE_LOST_KHR") { - return false; - } + if panic.payload.contains("ERROR_SURFACE_LOST_KHR") { + return false; + } - if panic.payload.contains("ERROR_INITIALIZATION_FAILED") { - return false; - } + if panic.payload.contains("ERROR_INITIALIZATION_FAILED") { + return false; + } - if panic - .payload - .contains("GPU has crashed, and no debug information is available") - { - return false; - } + if panic + .payload + .contains("GPU has crashed, and no debug information is available") + { + return false; } true From 9300dbc83494d2dbcadd9dcd4373a30bfe53a6e4 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Wed, 25 Sep 2024 12:04:17 -0400 Subject: [PATCH 327/762] Fix typo (#18345) Release Notes: - N/A --- crates/vim/src/command.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 67a674afa6f127..49e739faadfeaf 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -757,7 +757,7 @@ mod test { cx.simulate_shared_keystrokes(": j enter").await; - // hack: our cursor positionining after a join command is wrong + // hack: our cursor positioning after a join command is wrong cx.simulate_shared_keystrokes("^").await; cx.shared_state().await.assert_eq(indoc! { "ˇa b From 19162c316083890f999ef6d33e877856a4235df6 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Wed, 25 Sep 2024 18:08:34 +0200 Subject: [PATCH 328/762] ssh remoting: Show error message if project path does not exist (#18343) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This now shows an error message if you try open a project over SSH that doesn't exist. If it's a possible file-path though, it acts like Zed's `cli` and opens the file so that it can be created. 
- Works: `cargo run ssh://127.0.0.1/~/folder-exists/file-does-not-exist` — this will open `file-does-not-exist` - Shows error: `cargo run ssh://127.0.0.1/~/folder-does-not-exist/file-does-not-exist` — this will show an error Release Notes: - N/A Co-authored-by: Bennet Co-authored-by: Conrad --- crates/project/src/worktree_store.rs | 4 +-- crates/remote_server/Cargo.toml | 1 + crates/remote_server/src/headless_project.rs | 25 ++++++++++++++- crates/workspace/src/workspace.rs | 33 +++++++++++++++----- 4 files changed, 53 insertions(+), 10 deletions(-) diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 4b1764c3a87fa1..e445eab2dd6392 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -18,7 +18,7 @@ use gpui::{ use postage::oneshot; use rpc::{ proto::{self, SSH_PROJECT_ID}, - AnyProtoClient, TypedEnvelope, + AnyProtoClient, ErrorExt, TypedEnvelope, }; use smol::{ channel::{Receiver, Sender}, @@ -207,7 +207,7 @@ impl WorktreeStore { cx.background_executor().spawn(async move { match task.await { Ok(worktree) => Ok(worktree), - Err(err) => Err(anyhow!("{}", err)), + Err(err) => Err((*err).cloned()), } }) } diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index ed12b41167cc23..64db2616e9b2f8 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -22,6 +22,7 @@ test-support = ["fs/test-support"] [dependencies] anyhow.workspace = true +client.workspace = true env_logger.workspace = true fs.workspace = true futures.workspace = true diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 0af0d6bb1570dd..84fb22b282d37d 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -189,11 +189,34 @@ impl HeadlessProject { message: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { + use client::ErrorCodeExt; let path = 
shellexpand::tilde(&message.payload.path).to_string(); + + let fs = this.read_with(&mut cx, |this, _| this.fs.clone())?; + let path = PathBuf::from(path); + + let canonicalized = match fs.canonicalize(&path).await { + Ok(path) => path, + Err(e) => { + let mut parent = path + .parent() + .ok_or(e) + .map_err(|_| anyhow!("{:?} does not exist", path))?; + if parent == Path::new("") { + parent = util::paths::home_dir(); + } + let parent = fs.canonicalize(parent).await.map_err(|_| { + anyhow!(proto::ErrorCode::DevServerProjectPathDoesNotExist + .with_tag("path", &path.to_string_lossy().as_ref())) + })?; + parent.join(path.file_name().unwrap()) + } + }; + let worktree = this .update(&mut cx.clone(), |this, _| { Worktree::local( - Path::new(&path), + Arc::from(canonicalized), true, this.fs.clone(), this.next_entry_id.clone(), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 4d656294703d42..cec913851f04df 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5544,12 +5544,21 @@ pub fn open_ssh_project( ) })?; + let mut project_paths_to_open = vec![]; + let mut project_path_errors = vec![]; + for path in paths { - project - .update(&mut cx, |project, cx| { - project.find_or_create_worktree(&path, true, cx) - })? - .await?; + let result = cx + .update(|cx| Workspace::project_path_for_path(project.clone(), &path, true, cx))? + .await; + match result { + Ok((_, project_path)) => { + project_paths_to_open.push((path.clone(), Some(project_path))); + } + Err(error) => { + project_path_errors.push(error); + } + }; } let serialized_workspace = @@ -5576,11 +5585,21 @@ pub fn open_ssh_project( .update(&mut cx, |_, cx| { cx.activate_window(); - open_items(serialized_workspace, vec![], app_state, cx) + open_items(serialized_workspace, project_paths_to_open, app_state, cx) })? 
.await?; - Ok(()) + window.update(&mut cx, |workspace, cx| { + for error in project_path_errors { + if error.error_code() == proto::ErrorCode::DevServerProjectPathDoesNotExist { + if let Some(path) = error.error_tag("path") { + workspace.show_error(&anyhow!("'{path}' does not exist"), cx) + } + } else { + workspace.show_error(&error, cx) + } + } + }) }) } From 1f54fde4d2338730eecd46501688b8e777c7bb5c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8B=90=E7=8B=B8?= <134658521+Huliiiiii@users.noreply.github.com> Date: Thu, 26 Sep 2024 01:29:02 +0800 Subject: [PATCH 329/762] toml: Add highlight for escape sequences (#18346) --- extensions/toml/languages/toml/highlights.scm | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/extensions/toml/languages/toml/highlights.scm b/extensions/toml/languages/toml/highlights.scm index 04d83b545925d7..4be265cce74b3d 100644 --- a/extensions/toml/languages/toml/highlights.scm +++ b/extensions/toml/languages/toml/highlights.scm @@ -9,9 +9,10 @@ (boolean) @constant (comment) @comment -(string) @string (integer) @number (float) @number +(string) @string +(escape_sequence) @string.escape (offset_date_time) @string.special (local_date_time) @string.special (local_date) @string.special From dc48af0ca1d5297fac94c7d02bb858d564a6542b Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 25 Sep 2024 11:45:56 -0600 Subject: [PATCH 330/762] lsp: Remove reinstall, update config (#18318) Release Notes: - Fixed overriding the path of a language server binary for all language servers. `{"lsp":{"":{"binary":{"path": "_"}}}}` will now work for all language servers including those defined by extensions. - (breaking change) To disable finding lsp adapters in your path, you must now specify `{"lsp":{"":{"binary":{"ignore_system_version": true}}}}`. Previously this was `{"lsp":{"":{"binary":{"path_lookup": false}}}}`. Note that this setting still does not apply to extensions. - Removed automatic reinstallation of language servers. 
(It mostly didn't work) --------- Co-authored-by: Mikayla --- assets/settings/default.json | 2 +- .../src/activity_indicator.rs | 2 +- crates/extension/src/extension_lsp_adapter.rs | 15 +- crates/language/src/language.rs | 73 +- crates/language/src/language_registry.rs | 190 ++---- crates/languages/src/c.rs | 54 +- crates/languages/src/css.rs | 7 - crates/languages/src/go.rs | 54 +- crates/languages/src/json.rs | 19 - crates/languages/src/python.rs | 7 - crates/languages/src/rust.rs | 104 +-- crates/languages/src/tailwind.rs | 39 -- crates/languages/src/typescript.rs | 20 - crates/languages/src/vtsls.rs | 50 +- crates/languages/src/yaml.rs | 38 -- crates/lsp/src/lsp.rs | 9 + crates/project/src/lsp_store.rs | 628 ++++++++---------- crates/project/src/project.rs | 8 - crates/project/src/project_settings.rs | 4 +- crates/zed/src/main.rs | 2 +- docs/src/languages/rust.md | 4 +- 21 files changed, 397 insertions(+), 932 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 61239b002bedcb..cf0de6a5e7f9aa 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -783,7 +783,7 @@ /// or to ensure Zed always downloads and installs an isolated version of node: /// { /// "node": { - /// "disable_path_lookup": true + /// "ignore_system_version": true, /// } /// NOTE: changing this setting currently requires restarting Zed. "node": {}, diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 52e6acc393d299..ace972bf877183 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -299,7 +299,7 @@ impl ActivityIndicator { .into_any_element(), ), message: format!( - "Failed to download {}. Click to show error.", + "Failed to run {}. 
Click to show error.", failed .iter() .map(|name| name.0.as_ref()) diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs index d6125241f11a38..25179acec69ed0 100644 --- a/crates/extension/src/extension_lsp_adapter.rs +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -10,16 +10,11 @@ use gpui::AsyncAppContext; use language::{ CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate, }; -use lsp::{CodeActionKind, LanguageServerBinary}; +use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use serde::Serialize; use serde_json::Value; use std::ops::Range; -use std::{ - any::Any, - path::{Path, PathBuf}, - pin::Pin, - sync::Arc, -}; +use std::{any::Any, path::PathBuf, pin::Pin, sync::Arc}; use util::{maybe, ResultExt}; use wasmtime_wasi::WasiView as _; @@ -38,8 +33,8 @@ impl LspAdapter for ExtensionLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Option>, delegate: Arc, + _: LanguageServerBinaryOptions, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, ) -> Pin>>> { @@ -124,10 +119,6 @@ impl LspAdapter for ExtensionLspAdapter { unreachable!("get_language_server_command is overridden") } - async fn installation_test_binary(&self, _: PathBuf) -> Option { - None - } - fn code_action_kinds(&self) -> Option> { let code_action_kinds = self .extension diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index d70650cf449356..4c75ef4eeb38da 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -29,7 +29,7 @@ use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task}; pub use highlight_map::HighlightMap; use http_client::HttpClient; pub use language_registry::LanguageName; -use lsp::{CodeActionKind, LanguageServerBinary}; +use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use parking_lot::Mutex; use regex::Regex; use schemars::{ @@ -69,7 
+69,7 @@ pub use buffer::*; pub use diagnostic_set::DiagnosticEntry; pub use language_registry::{ AvailableLanguage, LanguageNotFound, LanguageQueries, LanguageRegistry, - LanguageServerBinaryStatus, PendingLanguageServer, QUERY_FILENAME_PREFIXES, + LanguageServerBinaryStatus, QUERY_FILENAME_PREFIXES, }; pub use lsp::LanguageServerId; pub use outline::*; @@ -249,28 +249,17 @@ impl CachedLspAdapter { pub async fn get_language_server_command( self: Arc, - container_dir: Option>, delegate: Arc, + binary_options: LanguageServerBinaryOptions, cx: &mut AsyncAppContext, ) -> Result { let cached_binary = self.cached_binary.lock().await; self.adapter .clone() - .get_language_server_command(container_dir, delegate, cached_binary, cx) + .get_language_server_command(delegate, binary_options, cached_binary, cx) .await } - pub fn can_be_reinstalled(&self) -> bool { - self.adapter.can_be_reinstalled() - } - - pub async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - self.adapter.installation_test_binary(container_dir).await - } - pub fn code_action_kinds(&self) -> Option> { self.adapter.code_action_kinds() } @@ -322,6 +311,7 @@ pub trait LspAdapterDelegate: Send + Sync { fn worktree_id(&self) -> WorktreeId; fn worktree_root_path(&self) -> &Path; fn update_status(&self, language: LanguageServerName, status: LanguageServerBinaryStatus); + async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option>; async fn which(&self, command: &OsStr) -> Option; async fn shell_env(&self) -> HashMap; @@ -335,8 +325,8 @@ pub trait LspAdapter: 'static + Send + Sync { fn get_language_server_command<'a>( self: Arc, - container_dir: Option>, delegate: Arc, + binary_options: LanguageServerBinaryOptions, mut cached_binary: futures::lock::MutexGuard<'a, Option>, cx: &'a mut AsyncAppContext, ) -> Pin>>> { @@ -352,30 +342,30 @@ pub trait LspAdapter: 'static + Send + Sync { // We only want to cache when we fall back to the global one, // because we 
don't want to download and overwrite our global one // for each worktree we might have open. - if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), cx).await { - log::info!( - "found user-installed language server for {}. path: {:?}, arguments: {:?}", - self.name().0, - binary.path, - binary.arguments - ); - return Ok(binary); + if binary_options.allow_path_lookup { + if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), cx).await { + log::info!( + "found user-installed language server for {}. path: {:?}, arguments: {:?}", + self.name().0, + binary.path, + binary.arguments + ); + return Ok(binary); + } + } + + if !binary_options.allow_binary_download { + return Err(anyhow!("downloading language servers disabled")); } if let Some(cached_binary) = cached_binary.as_ref() { return Ok(cached_binary.clone()); } - let Some(container_dir) = container_dir else { + let Some(container_dir) = delegate.language_server_download_dir(&self.name()).await else { anyhow::bail!("cannot download language servers for remotes (yet)") }; - if !container_dir.exists() { - smol::fs::create_dir_all(&container_dir) - .await - .context("failed to create container directory")?; - } - let mut binary = try_fetch_server_binary(self.as_ref(), &delegate, container_dir.to_path_buf(), cx).await; if let Err(error) = binary.as_ref() { @@ -443,21 +433,6 @@ pub trait LspAdapter: 'static + Send + Sync { delegate: &dyn LspAdapterDelegate, ) -> Option; - /// Returns `true` if a language server can be reinstalled. - /// - /// If language server initialization fails, a reinstallation will be attempted unless the value returned from this method is `false`. - /// - /// Implementations that rely on software already installed on user's system - /// should have [`can_be_reinstalled`](Self::can_be_reinstalled) return `false`. 
- fn can_be_reinstalled(&self) -> bool { - true - } - - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option; - fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {} /// Post-processes completions provided by the language server. @@ -1711,8 +1686,8 @@ impl LspAdapter for FakeLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Option>, _: Arc, + _: LanguageServerBinaryOptions, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, ) -> Pin>>> { @@ -1743,10 +1718,6 @@ impl LspAdapter for FakeLspAdapter { unreachable!(); } - async fn installation_test_binary(&self, _: PathBuf) -> Option { - unreachable!(); - } - fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {} fn disk_based_diagnostic_sources(&self) -> Vec { diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index e264517d5b0300..880ae3b6115c37 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -4,18 +4,17 @@ use crate::{ }, task_context::ContextProvider, with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher, - LanguageServerName, LspAdapter, LspAdapterDelegate, PLAIN_TEXT, + LanguageServerName, LspAdapter, PLAIN_TEXT, }; use anyhow::{anyhow, Context, Result}; use collections::{hash_map, HashMap, HashSet}; use futures::{ channel::{mpsc, oneshot}, - future::Shared, Future, }; use globset::GlobSet; -use gpui::{AppContext, BackgroundExecutor, Task}; +use gpui::{AppContext, BackgroundExecutor}; use lsp::LanguageServerId; use parking_lot::{Mutex, RwLock}; use postage::watch; @@ -118,12 +117,6 @@ pub enum LanguageServerBinaryStatus { Failed { error: String }, } -pub struct PendingLanguageServer { - pub server_id: LanguageServerId, - pub task: Task)>>, - pub container_dir: Option>, -} - #[derive(Clone)] pub struct AvailableLanguage { id: LanguageId, @@ -882,123 +875,53 @@ impl LanguageRegistry 
{ self.lsp_binary_status_tx.send(server_name, status); } - #[allow(clippy::too_many_arguments)] - pub fn create_pending_language_server( - self: &Arc, - stderr_capture: Arc>>, - _language_name_for_tests: LanguageName, - adapter: Arc, - root_path: Arc, - delegate: Arc, - project_environment: Shared>>>, - cx: &mut AppContext, - ) -> Option { - let server_id = self.state.write().next_language_server_id(); - log::info!( - "attempting to start language server {:?}, path: {root_path:?}, id: {server_id}", - adapter.name.0 - ); + pub fn next_language_server_id(&self) -> LanguageServerId { + self.state.write().next_language_server_id() + } - let container_dir: Option> = self - .language_server_download_dir + pub fn language_server_download_dir(&self, name: &LanguageServerName) -> Option> { + self.language_server_download_dir .as_ref() - .map(|dir| Arc::from(dir.join(adapter.name.0.as_ref()))); - let root_path = root_path.clone(); - let this = Arc::downgrade(self); - - let task = cx.spawn({ - let container_dir = container_dir.clone(); - move |mut cx| async move { - let project_environment = project_environment.await; - - let binary_result = adapter - .clone() - .get_language_server_command(container_dir, delegate.clone(), &mut cx) - .await; - - delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None); - - let mut binary = binary_result?; - - // If we do have a project environment (either by spawning a shell in in the project directory - // or by getting it from the CLI) and the language server command itself - // doesn't have an environment (which it would have, if it was found in $PATH), then - // we use the project environment. 
- if binary.env.is_none() && project_environment.is_some() { - log::info!( - "using project environment for language server {:?}, id: {server_id}", - adapter.name.0 - ); - binary.env = project_environment.clone(); - } - - let options = adapter - .adapter - .clone() - .initialization_options(&delegate) - .await?; + .map(|dir| Arc::from(dir.join(name.0.as_ref()))) + } - #[cfg(any(test, feature = "test-support"))] - if true { - if let Some(this) = this.upgrade() { - if let Some(fake_entry) = this - .state - .write() - .fake_server_entries - .get_mut(&adapter.name) - { - let (server, mut fake_server) = lsp::FakeLanguageServer::new( - server_id, - binary, - adapter.name.0.to_string(), - fake_entry.capabilities.clone(), - cx.clone(), - ); - fake_entry._server = Some(fake_server.clone()); - - if let Some(initializer) = &fake_entry.initializer { - initializer(&mut fake_server); - } + #[cfg(any(test, feature = "test-support"))] + pub fn create_fake_language_server( + &self, + server_id: LanguageServerId, + name: &LanguageServerName, + binary: lsp::LanguageServerBinary, + cx: gpui::AsyncAppContext, + ) -> Option { + let mut state = self.state.write(); + let fake_entry = state.fake_server_entries.get_mut(&name)?; + let (server, mut fake_server) = lsp::FakeLanguageServer::new( + server_id, + binary, + name.0.to_string(), + fake_entry.capabilities.clone(), + cx.clone(), + ); + fake_entry._server = Some(fake_server.clone()); - let tx = fake_entry.tx.clone(); - cx.background_executor() - .spawn(async move { - if fake_server - .try_receive_notification::( - ) - .await - .is_some() - { - tx.unbounded_send(fake_server.clone()).ok(); - } - }) - .detach(); + if let Some(initializer) = &fake_entry.initializer { + initializer(&mut fake_server); + } - return Ok((server, options)); - } - } + let tx = fake_entry.tx.clone(); + cx.background_executor() + .spawn(async move { + if fake_server + .try_receive_notification::() + .await + .is_some() + { + 
tx.unbounded_send(fake_server.clone()).ok(); } + }) + .detach(); - drop(this); - Ok(( - lsp::LanguageServer::new( - stderr_capture, - server_id, - binary, - &root_path, - adapter.code_action_kinds(), - cx, - )?, - options, - )) - } - }); - - Some(PendingLanguageServer { - server_id, - task, - container_dir, - }) + Some(server) } pub fn language_server_binary_statuses( @@ -1007,29 +930,16 @@ impl LanguageRegistry { self.lsp_binary_status_tx.subscribe() } - pub fn delete_server_container( - &self, - adapter: Arc, - cx: &mut AppContext, - ) -> Task<()> { + pub async fn delete_server_container(&self, name: LanguageServerName) { log::info!("deleting server container"); + let Some(dir) = self.language_server_download_dir(&name) else { + return; + }; - let download_dir = self - .language_server_download_dir - .clone() - .expect("language server download directory has not been assigned before deleting server container"); - - cx.spawn(|_| async move { - let container_dir = download_dir.join(adapter.name.0.as_ref()); - smol::fs::remove_dir_all(container_dir) - .await - .context("server container removal") - .log_err(); - }) - } - - pub fn next_language_server_id(&self) -> LanguageServerId { - self.state.write().next_language_server_id() + smol::fs::remove_dir_all(dir) + .await + .context("server container removal") + .log_err(); } } diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index 8a04e0aae6f4ef..28a12b5310f38a 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -5,7 +5,6 @@ use gpui::AsyncAppContext; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; pub use language::*; use lsp::LanguageServerBinary; -use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use smol::fs::{self, File}; use std::{any::Any, env::consts, path::PathBuf, sync::Arc}; use util::{fs::remove_matching, maybe, ResultExt}; @@ -25,41 +24,14 @@ impl super::LspAdapter for CLspAdapter { async fn 
check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, + _: &AsyncAppContext, ) -> Option { - let configured_binary = cx.update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) - }); - - match configured_binary { - Ok(Some(BinarySettings { - path: Some(path), - arguments, - .. - })) => Some(LanguageServerBinary { - path: path.into(), - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), - env: None, - }), - Ok(Some(BinarySettings { - path_lookup: Some(false), - .. - })) => None, - _ => { - let env = delegate.shell_env().await; - let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; - Some(LanguageServerBinary { - path, - arguments: vec![], - env: Some(env), - }) - } - } + let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; + Some(LanguageServerBinary { + path, + arguments: vec![], + env: None, + }) } async fn fetch_latest_server_version( @@ -141,18 +113,6 @@ impl super::LspAdapter for CLspAdapter { get_cached_server_binary(container_dir).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir) - .await - .map(|mut binary| { - binary.arguments = vec!["--help".into()]; - binary - }) - } - async fn label_for_completion( &self, completion: &lsp::CompletionItem, diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index 7b7e9ae77f06fc..b4e5feaab76c99 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -84,13 +84,6 @@ impl LspAdapter for CssLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn initialization_options( self: Arc, _: &Arc, diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index 
a1a996c066ee4b..135c080e00a149 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -6,7 +6,6 @@ use gpui::{AppContext, AsyncAppContext, Task}; use http_client::github::latest_github_release; pub use language::*; use lsp::LanguageServerBinary; -use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use regex::Regex; use serde_json::json; use smol::{fs, process}; @@ -68,41 +67,14 @@ impl super::LspAdapter for GoLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, + _: &AsyncAppContext, ) -> Option { - let configured_binary = cx.update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) - }); - - match configured_binary { - Ok(Some(BinarySettings { - path: Some(path), - arguments, - .. - })) => Some(LanguageServerBinary { - path: path.into(), - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), - env: None, - }), - Ok(Some(BinarySettings { - path_lookup: Some(false), - .. 
- })) => None, - _ => { - let env = delegate.shell_env().await; - let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; - Some(LanguageServerBinary { - path, - arguments: server_binary_arguments(), - env: Some(env), - }) - } - } + let path = delegate.which(Self::SERVER_NAME.as_ref()).await?; + Some(LanguageServerBinary { + path, + arguments: server_binary_arguments(), + env: None, + }) } fn will_fetch_server( @@ -214,18 +186,6 @@ impl super::LspAdapter for GoLspAdapter { get_cached_server_binary(container_dir).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir) - .await - .map(|mut binary| { - binary.arguments = vec!["--help".into()]; - binary - }) - } - async fn initialization_options( self: Arc, _: &Arc, diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 44cc68387676ee..95c4070b13a331 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -186,13 +186,6 @@ impl LspAdapter for JsonLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn initialization_options( self: Arc, _: &Arc, @@ -374,18 +367,6 @@ impl LspAdapter for NodeVersionAdapter { ) -> Option { get_cached_version_server_binary(container_dir).await } - - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_version_server_binary(container_dir) - .await - .map(|mut binary| { - binary.arguments = vec!["--version".into()]; - binary - }) - } } async fn get_cached_version_server_binary(container_dir: PathBuf) -> Option { diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 75f124489c3820..964abf42b525fa 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -97,13 +97,6 @@ impl LspAdapter 
for PythonLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn process_completions(&self, items: &mut [lsp::CompletionItem]) { // Pyright assigns each completion item a `sortText` of the form `XX.YYYY.name`. // Where `XX` is the sorting category, `YYYY` is based on most recent usage, diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index eebd573a7e25fa..0d644e1bfef247 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -8,7 +8,6 @@ use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; pub use language::*; use language_settings::all_language_settings; use lsp::LanguageServerBinary; -use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; use regex::Regex; use smol::fs::{self, File}; use std::{ @@ -37,77 +36,34 @@ impl LspAdapter for RustLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, + _: &AsyncAppContext, ) -> Option { - let configured_binary = cx - .update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) + let path = delegate.which("rust-analyzer".as_ref()).await?; + let env = delegate.shell_env().await; + + // It is surprisingly common for ~/.cargo/bin/rust-analyzer to be a symlink to + // /usr/bin/rust-analyzer that fails when you run it; so we need to test it. + log::info!("found rust-analyzer in PATH. trying to run `rust-analyzer --help`"); + let result = delegate + .try_exec(LanguageServerBinary { + path: path.clone(), + arguments: vec!["--help".into()], + env: Some(env.clone()), }) - .ok()?; - - let (path, env, arguments) = match configured_binary { - // If nothing is configured, or path_lookup explicitly enabled, - // we lookup the binary in the path. 
- None - | Some(BinarySettings { - path: None, - path_lookup: Some(true), - .. - }) - | Some(BinarySettings { - path: None, - path_lookup: None, - .. - }) => { - let path = delegate.which("rust-analyzer".as_ref()).await; - let env = delegate.shell_env().await; - - if let Some(path) = path { - // It is surprisingly common for ~/.cargo/bin/rust-analyzer to be a symlink to - // /usr/bin/rust-analyzer that fails when you run it; so we need to test it. - log::info!("found rust-analyzer in PATH. trying to run `rust-analyzer --help`"); - match delegate - .try_exec(LanguageServerBinary { - path: path.clone(), - arguments: vec!["--help".into()], - env: Some(env.clone()), - }) - .await - { - Ok(()) => (Some(path), Some(env), None), - Err(err) => { - log::error!("failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {}", path, err); - (None, None, None) - } - } - } else { - (None, None, None) - } - } - // Otherwise, we use the configured binary. - Some(BinarySettings { - path: Some(path), - arguments, - path_lookup, - }) => { - if path_lookup.is_some() { - log::warn!("Both `path` and `path_lookup` are set, ignoring `path_lookup`"); - } - (Some(path.into()), None, arguments) - } - - _ => (None, None, None), - }; + .await; + if let Err(err) = result { + log::error!( + "failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {}", + path, + err + ); + return None; + } - path.map(|path| LanguageServerBinary { + Some(LanguageServerBinary { path, - env, - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), + env: Some(env), + arguments: vec![], }) } @@ -186,18 +142,6 @@ impl LspAdapter for RustLspAdapter { get_cached_server_binary(container_dir).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir) - .await - .map(|mut binary| { - binary.arguments = vec!["--help".into()]; - binary - }) - } - fn disk_based_diagnostic_sources(&self) 
-> Vec { vec!["rustc".into()] } diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 62d967d6a4a267..4ed5c742a9fc8d 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -46,38 +46,6 @@ impl LspAdapter for TailwindLspAdapter { Self::SERVER_NAME.clone() } - async fn check_if_user_installed( - &self, - delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, - ) -> Option { - let configured_binary = cx - .update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) - }) - .ok()??; - - let path = if let Some(configured_path) = configured_binary.path.map(PathBuf::from) { - configured_path - } else { - self.node.binary_path().await.ok()? - }; - - let arguments = configured_binary - .arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(); - - Some(LanguageServerBinary { - path, - arguments, - env: None, - }) - } - async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, @@ -125,13 +93,6 @@ impl LspAdapter for TailwindLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn initialization_options( self: Arc, _: &Arc, diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index b7eb21132d52c4..cfd7e04bc64177 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -164,13 +164,6 @@ impl LspAdapter for TypeScriptLspAdapter { get_cached_ts_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_ts_server_binary(container_dir, &self.node).await - } - fn code_action_kinds(&self) -> Option> { Some(vec![ CodeActionKind::QUICKFIX, @@ -509,19 +502,6 @@ impl LspAdapter for EsLintLspAdapter { 
arguments: eslint_server_binary_arguments(&server_path), }) } - - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - let server_path = - Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH); - Some(LanguageServerBinary { - path: self.node.binary_path().await.ok()?, - env: None, - arguments: eslint_server_binary_arguments(&server_path), - }) - } } #[cfg(target_os = "windows")] diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index de6d575a8ee9fd..ff8637dc28dbd8 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -5,7 +5,7 @@ use gpui::AsyncAppContext; use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; -use project::{lsp_store::language_server_settings, project_settings::BinarySettings}; +use project::lsp_store::language_server_settings; use serde_json::Value; use std::{ any::Any, @@ -71,40 +71,15 @@ impl LspAdapter for VtslsLspAdapter { async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, + _: &AsyncAppContext, ) -> Option { - let configured_binary = cx.update(|cx| { - language_server_settings(delegate, &SERVER_NAME, cx).and_then(|s| s.binary.clone()) - }); - - match configured_binary { - Ok(Some(BinarySettings { - path: Some(path), - arguments, - .. - })) => Some(LanguageServerBinary { - path: path.into(), - arguments: arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(), - env: None, - }), - Ok(Some(BinarySettings { - path_lookup: Some(false), - .. 
- })) => None, - _ => { - let env = delegate.shell_env().await; - let path = delegate.which(SERVER_NAME.as_ref()).await?; - Some(LanguageServerBinary { - path: path.clone(), - arguments: typescript_server_binary_arguments(&path), - env: Some(env), - }) - } - } + let env = delegate.shell_env().await; + let path = delegate.which(SERVER_NAME.as_ref()).await?; + Some(LanguageServerBinary { + path: path.clone(), + arguments: typescript_server_binary_arguments(&path), + env: Some(env), + }) } async fn fetch_server_binary( @@ -157,13 +132,6 @@ impl LspAdapter for VtslsLspAdapter { get_cached_ts_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_ts_server_binary(container_dir, &self.node).await - } - fn code_action_kinds(&self) -> Option> { Some(vec![ CodeActionKind::QUICKFIX, diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 32ca73168ab2d2..642d6c030ac915 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -42,37 +42,6 @@ impl LspAdapter for YamlLspAdapter { Self::SERVER_NAME.clone() } - async fn check_if_user_installed( - &self, - delegate: &dyn LspAdapterDelegate, - cx: &AsyncAppContext, - ) -> Option { - let configured_binary = cx - .update(|cx| { - language_server_settings(delegate, &Self::SERVER_NAME, cx) - .and_then(|s| s.binary.clone()) - }) - .ok()??; - - let path = if let Some(configured_path) = configured_binary.path.map(PathBuf::from) { - configured_path - } else { - self.node.binary_path().await.ok()? 
- }; - - let arguments = configured_binary - .arguments - .unwrap_or_default() - .iter() - .map(|arg| arg.into()) - .collect(); - Some(LanguageServerBinary { - path, - arguments, - env: None, - }) - } - async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, @@ -120,13 +89,6 @@ impl LspAdapter for YamlLspAdapter { get_cached_server_binary(container_dir, &self.node).await } - async fn installation_test_binary( - &self, - container_dir: PathBuf, - ) -> Option { - get_cached_server_binary(container_dir, &self.node).await - } - async fn workspace_configuration( self: Arc, delegate: &Arc, diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index c2a5951de72101..e380da052ddc95 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -64,6 +64,15 @@ pub struct LanguageServerBinary { pub env: Option>, } +/// Configures the search (and installation) of language servers. +#[derive(Debug, Clone, Deserialize)] +pub struct LanguageServerBinaryOptions { + /// Whether the adapter should look at the users system + pub allow_path_lookup: bool, + /// Whether the adapter should download its own version + pub allow_binary_download: bool, +} + /// A running language server process. 
pub struct LanguageServer { server_id: LanguageServerId, diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 8d859c091bfe93..21d5de53e6be28 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -37,16 +37,16 @@ use language::{ proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageConfig, - LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, - LspAdapterDelegate, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, - ToPointUtf16, Transaction, Unclipped, + LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerBinaryStatus, + LanguageServerName, LocalFile, LspAdapter, LspAdapterDelegate, Patch, PointUtf16, + TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, }; use lsp::{ CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, FileSystemWatcher, InsertTextFormat, - LanguageServer, LanguageServerBinary, LanguageServerId, LspRequestFuture, MessageActionItem, - MessageType, OneOf, ServerHealthStatus, ServerStatus, SymbolKind, TextEdit, Url, - WorkDoneProgressCancelParams, WorkspaceFolder, + LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId, + LspRequestFuture, MessageActionItem, MessageType, OneOf, ServerHealthStatus, ServerStatus, + SymbolKind, TextEdit, Url, WorkDoneProgressCancelParams, WorkspaceFolder, }; use parking_lot::{Mutex, RwLock}; use postage::watch; @@ -67,9 +67,8 @@ use std::{ iter, mem, ops::{ControlFlow, Range}, path::{self, Path, PathBuf}, - process::Stdio, str, - sync::{atomic::Ordering::SeqCst, Arc}, + sync::Arc, time::{Duration, Instant}, }; use text::{Anchor, BufferId, LineEnding}; @@ -87,8 +86,6 @@ pub use 
worktree::{ FS_WATCH_LATENCY, }; -const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4; -const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1); const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); pub const SERVER_PROGRESS_THROTTLE_TIMEOUT: Duration = Duration::from_millis(100); @@ -157,6 +154,7 @@ impl LocalLspStore { futures::future::join_all(shutdown_futures).await; } } + async fn format_locally( lsp_store: WeakModel, mut buffers_with_paths: Vec<(Model, Option)>, @@ -1471,7 +1469,7 @@ impl LspStore { } for (worktree_id, adapter_name) in language_servers_to_stop { - self.stop_language_server(worktree_id, adapter_name, cx) + self.stop_local_language_server(worktree_id, adapter_name, cx) .detach(); } @@ -1488,7 +1486,7 @@ impl LspStore { // Restart all language servers with changed initialization options. for (worktree, language) in language_servers_to_restart { - self.restart_language_servers(worktree, language, cx); + self.restart_local_language_servers(worktree, language, cx); } cx.notify(); @@ -3028,7 +3026,7 @@ impl LspStore { }) } - pub fn primary_language_server_for_buffer<'a>( + fn primary_language_server_for_buffer<'a>( &'a self, buffer: &'a Buffer, cx: &'a AppContext, @@ -3328,7 +3326,7 @@ impl LspStore { Ok(()) } - pub fn update_worktree_diagnostics( + fn update_worktree_diagnostics( &mut self, worktree_id: WorktreeId, server_id: LanguageServerId, @@ -5405,9 +5403,6 @@ impl LspStore { language_registry: self.languages.clone(), }) as Arc; - // TODO: We should use `adapter` here instead of reaching through the `CachedLspAdapter`. 
- let lsp_adapter = adapter.adapter.clone(); - let Some((upstream_client, project_id)) = self.upstream_client() else { return; }; @@ -5419,17 +5414,11 @@ impl LspStore { return; }; - let task = cx.spawn(|_, cx| async move { - let user_binary_task = lsp_adapter.check_if_user_installed(delegate.as_ref(), &cx); - let binary = match user_binary_task.await { - Some(binary) => binary, - None => { - return Err(anyhow!( - "Downloading language server for ssh host is not supported yet" - )) - } - }; + let user_binary_task = + self.get_language_server_binary(adapter.clone(), delegate.clone(), false, cx); + let task = cx.spawn(|_, _| async move { + let binary = user_binary_task.await?; let name = adapter.name(); let code_action_kinds = adapter .adapter @@ -5481,6 +5470,73 @@ impl LspStore { .detach(); } + fn get_language_server_binary( + &self, + adapter: Arc, + delegate: Arc, + allow_binary_download: bool, + cx: &mut ModelContext, + ) -> Task> { + let settings = ProjectSettings::get( + Some(SettingsLocation { + worktree_id: delegate.worktree_id(), + path: Path::new(""), + }), + cx, + ) + .lsp + .get(&adapter.name) + .and_then(|s| s.binary.clone()); + + if settings.as_ref().is_some_and(|b| b.path.is_some()) { + let settings = settings.unwrap(); + return cx.spawn(|_, _| async move { + Ok(LanguageServerBinary { + path: PathBuf::from(&settings.path.unwrap()), + env: Some(delegate.shell_env().await), + arguments: settings + .arguments + .unwrap_or_default() + .iter() + .map(Into::into) + .collect(), + }) + }); + } + let lsp_binary_options = LanguageServerBinaryOptions { + allow_path_lookup: !settings + .as_ref() + .and_then(|b| b.ignore_system_version) + .unwrap_or_default(), + allow_binary_download, + }; + cx.spawn(|_, mut cx| async move { + let binary_result = adapter + .clone() + .get_language_server_command(delegate.clone(), lsp_binary_options, &mut cx) + .await; + + delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None); + + let mut binary = 
binary_result?; + if let Some(arguments) = settings.and_then(|b| b.arguments) { + binary.arguments = arguments.into_iter().map(Into::into).collect(); + } + + // If we do have a project environment (either by spawning a shell in in the project directory + // or by getting it from the CLI) and the language server command itself + // doesn't have an environment, then we use the project environment. + if binary.env.is_none() { + log::info!( + "using project environment for language server {:?}", + adapter.name() + ); + binary.env = Some(delegate.shell_env().await); + } + Ok(binary) + }) + } + fn start_language_server( &mut self, worktree_handle: &Model, @@ -5496,6 +5552,7 @@ impl LspStore { let worktree_id = worktree.id(); let worktree_path = worktree.abs_path(); let key = (worktree_id, adapter.name.clone()); + if self.language_server_ids.contains_key(&key) { return; } @@ -5505,31 +5562,6 @@ impl LspStore { return; } - if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT { - return; - } - - let local = self.as_local().unwrap(); - - let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); - let lsp_adapter_delegate = LocalLspAdapterDelegate::for_local(self, worktree_handle, cx); - let project_environment = local.environment.update(cx, |environment, cx| { - environment.get_environment(Some(worktree_id), Some(worktree_path.clone()), cx) - }); - - let pending_server = match self.languages.create_pending_language_server( - stderr_capture.clone(), - language.clone(), - adapter.clone(), - Arc::clone(&worktree_path), - lsp_adapter_delegate.clone(), - project_environment, - cx, - ) { - Some(pending_server) => pending_server, - None => return, - }; - let project_settings = ProjectSettings::get( Some(SettingsLocation { worktree_id, @@ -5537,76 +5569,146 @@ impl LspStore { }), cx, ); - - // We need some on the SSH client, and some on SSH host let lsp = project_settings.lsp.get(&adapter.name); let override_options = lsp.and_then(|s| 
s.initialization_options.clone()); - let server_id = pending_server.server_id; - let container_dir = pending_server.container_dir.clone(); - let state = LanguageServerState::Starting({ + let stderr_capture = Arc::new(Mutex::new(Some(String::new()))); + let delegate = LocalLspAdapterDelegate::for_local(self, worktree_handle, cx) + as Arc; + + let server_id = self.languages.next_language_server_id(); + let root_path = worktree_path.clone(); + log::info!( + "attempting to start language server {:?}, path: {root_path:?}, id: {server_id}", + adapter.name.0 + ); + + let binary = self.get_language_server_binary(adapter.clone(), delegate.clone(), true, cx); + + let pending_server = cx.spawn({ let adapter = adapter.clone(); + let stderr_capture = stderr_capture.clone(); + + move |_lsp_store, cx| async move { + let binary = binary.await?; + + #[cfg(any(test, feature = "test-support"))] + if let Some(server) = _lsp_store + .update(&mut cx.clone(), |this, cx| { + this.languages.create_fake_language_server( + server_id, + &adapter.name, + binary.clone(), + cx.to_async(), + ) + }) + .ok() + .flatten() + { + return Ok(server); + } + + lsp::LanguageServer::new( + stderr_capture, + server_id, + binary, + &root_path, + adapter.code_action_kinds(), + cx, + ) + } + }); + + let state = LanguageServerState::Starting({ let server_name = adapter.name.0.clone(); + let delegate = delegate as Arc; let language = language.clone(); let key = key.clone(); + let adapter = adapter.clone(); cx.spawn(move |this, mut cx| async move { - let result = Self::setup_and_insert_language_server( - this.clone(), - lsp_adapter_delegate, - override_options, - pending_server, - adapter.clone(), - language.clone(), - server_id, - key, - &mut cx, - ) - .await; + let result = { + let delegate = delegate.clone(); + let adapter = adapter.clone(); + let this = this.clone(); + let mut cx = cx.clone(); + async move { + let language_server = pending_server.await?; - match result { - Ok(server) => { - 
stderr_capture.lock().take(); - server - } + let workspace_config = adapter + .adapter + .clone() + .workspace_configuration(&delegate, &mut cx) + .await?; - Err(err) => { - log::error!("failed to start language server {server_name:?}: {err}"); - log::error!("server stderr: {:?}", stderr_capture.lock().take()); + let mut initialization_options = adapter + .adapter + .clone() + .initialization_options(&(delegate)) + .await?; - let this = this.upgrade()?; - let container_dir = container_dir?; + Self::setup_lsp_messages(this.clone(), &language_server, delegate, adapter); - let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst); - if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT { - let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT; - log::error!("Hit {max} reinstallation attempts for {server_name:?}"); - return None; + match (&mut initialization_options, override_options) { + (Some(initialization_options), Some(override_options)) => { + merge_json_value_into(override_options, initialization_options); + } + (None, override_options) => initialization_options = override_options, + _ => {} } - log::info!( - "retrying installation of language server {server_name:?} in {}s", - SERVER_REINSTALL_DEBOUNCE_TIMEOUT.as_secs() - ); - cx.background_executor() - .timer(SERVER_REINSTALL_DEBOUNCE_TIMEOUT) - .await; + let language_server = cx + .update(|cx| language_server.initialize(initialization_options, cx))? 
+ .await + .inspect_err(|_| { + if let Some(this) = this.upgrade() { + this.update(&mut cx, |_, cx| { + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)) + }) + .ok(); + } + })?; - let installation_test_binary = adapter - .installation_test_binary(container_dir.to_path_buf()) - .await; + language_server + .notify::( + lsp::DidChangeConfigurationParams { + settings: workspace_config, + }, + ) + .ok(); - this.update(&mut cx, |_, cx| { - Self::check_errored_server( + anyhow::Ok(language_server) + } + } + .await; + + match result { + Ok(server) => { + this.update(&mut cx, |this, mut cx| { + this.insert_newly_running_language_server( language, adapter, + server.clone(), server_id, - installation_test_binary, - cx, - ) + key, + &mut cx, + ); }) .ok(); + stderr_capture.lock().take(); + Some(server) + } + Err(err) => { + let log = stderr_capture.lock().take().unwrap_or_default(); + delegate.update_status( + adapter.name(), + LanguageServerBinaryStatus::Failed { + error: format!("{err}\n-- stderr--\n{}", log), + }, + ); + log::error!("Failed to start language server {server_name:?}: {err}"); + log::error!("server stderr: {:?}", log); None } } @@ -5620,109 +5722,6 @@ impl LspStore { self.language_server_ids.insert(key, server_id); } - #[allow(clippy::too_many_arguments)] - async fn setup_and_insert_language_server( - this: WeakModel, - delegate: Arc, - override_initialization_options: Option, - pending_server: PendingLanguageServer, - adapter: Arc, - language: LanguageName, - server_id: LanguageServerId, - key: (WorktreeId, LanguageServerName), - cx: &mut AsyncAppContext, - ) -> Result>> { - let language_server = Self::setup_pending_language_server( - this.clone(), - override_initialization_options, - pending_server, - delegate, - adapter.clone(), - server_id, - cx, - ) - .await?; - - let this = match this.upgrade() { - Some(this) => this, - None => return Err(anyhow!("failed to upgrade project handle")), - }; - - this.update(cx, |this, cx| { - 
this.insert_newly_running_language_server( - language, - adapter, - language_server.clone(), - server_id, - key, - cx, - ) - })??; - - Ok(Some(language_server)) - } - - fn reinstall_language_server( - &mut self, - language: LanguageName, - adapter: Arc, - server_id: LanguageServerId, - cx: &mut ModelContext, - ) -> Option> { - log::info!("beginning to reinstall server"); - - if let Some(local) = self.as_local_mut() { - let existing_server = match local.language_servers.remove(&server_id) { - Some(LanguageServerState::Running { server, .. }) => Some(server), - _ => None, - }; - - self.worktree_store.update(cx, |store, cx| { - for worktree in store.worktrees() { - let key = (worktree.read(cx).id(), adapter.name.clone()); - self.language_server_ids.remove(&key); - } - }); - - Some(cx.spawn(move |this, mut cx| async move { - if let Some(task) = existing_server.and_then(|server| server.shutdown()) { - log::info!("shutting down existing server"); - task.await; - } - - // TODO: This is race-safe with regards to preventing new instances from - // starting while deleting, but existing instances in other projects are going - // to be very confused and messed up - let Some(task) = this - .update(&mut cx, |this, cx| { - this.languages.delete_server_container(adapter.clone(), cx) - }) - .log_err() - else { - return; - }; - task.await; - - this.update(&mut cx, |this, cx| { - for worktree in this.worktree_store.read(cx).worktrees().collect::>() { - this.start_language_server( - &worktree, - adapter.clone(), - language.clone(), - cx, - ); - } - }) - .ok(); - })) - } else if let Some(_ssh_store) = self.as_ssh() { - // TODO - None - } else { - None - } - } - async fn shutdown_language_server( server_state: Option, name: LanguageServerName, @@ -5761,7 +5760,7 @@ impl LspStore { // Returns a list of all of the worktrees which no longer have a language server and the root path // for the stopped server - pub fn stop_language_server( + fn stop_local_language_server( &mut self, 
worktree_id: WorktreeId, adapter_name: LanguageServerName, @@ -5877,7 +5876,6 @@ impl LspStore { .spawn(request) .detach_and_log_err(cx); } else { - #[allow(clippy::mutable_key_type)] let language_server_lookup_info: HashSet<(Model, LanguageName)> = buffers .into_iter() .filter_map(|buffer| { @@ -5893,12 +5891,12 @@ impl LspStore { .collect(); for (worktree, language) in language_server_lookup_info { - self.restart_language_servers(worktree, language, cx); + self.restart_local_language_servers(worktree, language, cx); } } } - pub fn restart_language_servers( + fn restart_local_language_servers( &mut self, worktree: Model, language: LanguageName, @@ -5912,7 +5910,8 @@ impl LspStore { .lsp_adapters(&language) .iter() .map(|adapter| { - let stop_task = self.stop_language_server(worktree_id, adapter.name.clone(), cx); + let stop_task = + self.stop_local_language_server(worktree_id, adapter.name.clone(), cx); (stop_task, adapter.name.clone()) }) .collect::>(); @@ -5951,93 +5950,14 @@ impl LspStore { .detach(); } - fn check_errored_server( - language: LanguageName, - adapter: Arc, - server_id: LanguageServerId, - installation_test_binary: Option, - cx: &mut ModelContext, - ) { - if !adapter.can_be_reinstalled() { - log::info!( - "Validation check requested for {:?} but it cannot be reinstalled", - adapter.name.0 - ); - return; - } - - cx.spawn(move |this, mut cx| async move { - log::info!("About to spawn test binary"); - - // A lack of test binary counts as a failure - let process = installation_test_binary.and_then(|binary| { - smol::process::Command::new(&binary.path) - .current_dir(&binary.path) - .args(binary.arguments) - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::inherit()) - .kill_on_drop(true) - .spawn() - .ok() - }); - - const PROCESS_TIMEOUT: Duration = Duration::from_secs(5); - let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse(); - - let mut errored = false; - if let Some(mut process) = process { - futures::select! 
{ - status = process.status().fuse() => match status { - Ok(status) => errored = !status.success(), - Err(_) => errored = true, - }, - - _ = timeout => { - log::info!("test binary time-ed out, this counts as a success"); - _ = process.kill(); - } - } - } else { - log::warn!("test binary failed to launch"); - errored = true; - } - - if errored { - log::warn!("test binary check failed"); - let task = this - .update(&mut cx, move |this, cx| { - this.reinstall_language_server(language, adapter, server_id, cx) - }) - .ok() - .flatten(); - - if let Some(task) = task { - task.await; - } - } - }) - .detach(); - } - - async fn setup_pending_language_server( + fn setup_lsp_messages( this: WeakModel, - override_options: Option, - pending_server: PendingLanguageServer, + language_server: &LanguageServer, delegate: Arc, adapter: Arc, - server_id: LanguageServerId, - cx: &mut AsyncAppContext, - ) -> Result> { - let workspace_config = adapter - .adapter - .clone() - .workspace_configuration(&delegate, cx) - .await?; - // This has to come from the server - let (language_server, mut initialization_options) = pending_server.task.await?; - + ) { let name = language_server.name(); + let server_id = language_server.server_id(); language_server .on_notification::({ let adapter = adapter.clone(); @@ -6091,7 +6011,6 @@ impl LspStore { }) .detach(); - let id = language_server.server_id(); language_server .on_request::({ let this = this.clone(); @@ -6099,7 +6018,7 @@ impl LspStore { let this = this.clone(); async move { let Some(server) = - this.update(&mut cx, |this, _| this.language_server_for_id(id))? + this.update(&mut cx, |this, _| this.language_server_for_id(server_id))? 
else { return Ok(None); }; @@ -6375,9 +6294,6 @@ impl LspStore { }) .detach(); - let disk_based_diagnostics_progress_token = - adapter.disk_based_diagnostics_progress_token.clone(); - language_server .on_notification::({ let this = this.clone(); @@ -6448,6 +6364,10 @@ impl LspStore { } }) .detach(); + + let disk_based_diagnostics_progress_token = + adapter.disk_based_diagnostics_progress_token.clone(); + language_server .on_notification::({ let this = this.clone(); @@ -6502,36 +6422,6 @@ impl LspStore { } }) .detach(); - - match (&mut initialization_options, override_options) { - (Some(initialization_options), Some(override_options)) => { - merge_json_value_into(override_options, initialization_options); - } - (None, override_options) => initialization_options = override_options, - _ => {} - } - - let language_server = cx - .update(|cx| language_server.initialize(initialization_options, cx))? - .await - .inspect_err(|_| { - if let Some(this) = this.upgrade() { - this.update(cx, |_, cx| { - cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)) - }) - .ok(); - } - })?; - - language_server - .notify::( - lsp::DidChangeConfigurationParams { - settings: workspace_config, - }, - ) - .ok(); - - Ok(language_server) } pub fn update_diagnostics( @@ -6664,7 +6554,7 @@ impl LspStore { server_id: LanguageServerId, key: (WorktreeId, LanguageServerName), cx: &mut ModelContext, - ) -> Result<()> { + ) { // If the language server for this key doesn't match the server id, don't store the // server. 
Which will cause it to be dropped, killing the process if self @@ -6673,7 +6563,7 @@ impl LspStore { .map(|id| id != &server_id) .unwrap_or(false) { - return Ok(()); + return; } // Update language_servers collection with Running variant of LanguageServerState @@ -6703,13 +6593,15 @@ impl LspStore { cx.emit(LspStoreEvent::LanguageServerAdded(server_id)); if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() { - downstream_client.send(proto::StartLanguageServer { - project_id: *project_id, - server: Some(proto::LanguageServer { - id: server_id.0 as u64, - name: language_server.name().to_string(), - }), - })?; + downstream_client + .send(proto::StartLanguageServer { + project_id: *project_id, + server: Some(proto::LanguageServer { + id: server_id.0 as u64, + name: language_server.name().to_string(), + }), + }) + .log_err(); } // Tell the language server about every open buffer in the worktree that matches the language. @@ -6756,16 +6648,18 @@ impl LspStore { let version = snapshot.version; let initial_snapshot = &snapshot.snapshot; let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); - language_server.notify::( - lsp::DidOpenTextDocumentParams { - text_document: lsp::TextDocumentItem::new( - uri, - adapter.language_id(&language.name()), - version, - initial_snapshot.text(), - ), - }, - )?; + language_server + .notify::( + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + uri, + adapter.language_id(&language.name()), + version, + initial_snapshot.text(), + ), + }, + ) + .log_err(); buffer_handle.update(cx, |buffer, cx| { buffer.set_completion_triggers( @@ -6779,11 +6673,9 @@ impl LspStore { ) }); } - anyhow::Ok(()) - })?; + }); cx.notify(); - Ok(()) } fn buffer_snapshot_for_lsp_version( @@ -6878,7 +6770,7 @@ impl LspStore { }) } - pub fn register_supplementary_language_server( + fn register_supplementary_language_server( &mut self, id: LanguageServerId, name: LanguageServerName, @@ -6893,7 +6785,7 @@ 
impl LspStore { } } - pub fn unregister_supplementary_language_server( + fn unregister_supplementary_language_server( &mut self, id: LanguageServerId, cx: &mut ModelContext, @@ -7807,11 +7699,8 @@ impl LspAdapter for SshLspAdapter { ) -> Result { anyhow::bail!("SshLspAdapter does not support fetch_server_binary") } - - async fn installation_test_binary(&self, _: PathBuf) -> Option { - None - } } + pub fn language_server_settings<'a, 'b: 'a>( delegate: &'a dyn LspAdapterDelegate, language: &LanguageServerName, @@ -7855,22 +7744,6 @@ impl LocalLspAdapterDelegate { Self::new(lsp_store, worktree, http_client, local.fs.clone(), cx) } - // fn for_ssh( - // lsp_store: &LspStore, - // worktree: &Model, - // upstream_client: AnyProtoClient, - // cx: &mut ModelContext, - // ) -> Arc { - // Self::new( - // lsp_store, - // worktree, - // Arc::new(BlockedHttpClient), - // None, - // Some(upstream_client), - // cx, - // ) - // } - pub fn new( lsp_store: &LspStore, worktree: &Model, @@ -7972,6 +7845,19 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { .update_lsp_status(server_name, status); } + async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option> { + let dir = self.language_registry.language_server_download_dir(name)?; + + if !dir.exists() { + smol::fs::create_dir_all(&dir) + .await + .context("failed to create container directory") + .log_err()?; + } + + Some(dir) + } + async fn read_text_file(&self, path: PathBuf) -> Result { if self.worktree.entry_for_path(&path).is_none() { return Err(anyhow!("no such path {path:?}")); @@ -8056,6 +7942,10 @@ impl LspAdapterDelegate for SshLspAdapterDelegate { Ok(()) } + async fn language_server_download_dir(&self, _: &LanguageServerName) -> Option> { + None + } + fn update_status( &self, server_name: LanguageServerName, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index ee7f93a4f933c2..8d95c8f2f18238 100644 --- a/crates/project/src/project.rs +++ 
b/crates/project/src/project.rs @@ -3958,14 +3958,6 @@ impl Project { self.lsp_store.read(cx).supplementary_language_servers() } - pub fn language_server_adapter_for_id( - &self, - id: LanguageServerId, - cx: &AppContext, - ) -> Option> { - self.lsp_store.read(cx).language_server_adapter_for_id(id) - } - pub fn language_server_for_id( &self, id: LanguageServerId, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 68593f8fab0525..706d3afdce45e2 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -55,7 +55,7 @@ pub struct NodeBinarySettings { pub npm_path: Option, /// If disabled, zed will download its own copy of node. #[serde(default)] - pub disable_path_lookup: Option, + pub ignore_system_version: Option, } #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] @@ -143,7 +143,7 @@ const fn true_value() -> bool { pub struct BinarySettings { pub path: Option, pub arguments: Option>, - pub path_lookup: Option, + pub ignore_system_version: Option, } #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index e3fe2baefa9b6c..0f37e06f438f92 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -481,7 +481,7 @@ fn main() { cx.observe_global::(move |cx| { let settings = &ProjectSettings::get_global(cx).node; let options = NodeBinaryOptions { - allow_path_lookup: !settings.disable_path_lookup.unwrap_or_default(), + allow_path_lookup: !settings.ignore_system_version.unwrap_or_default(), // TODO: Expose this setting allow_binary_download: true, use_paths: settings.path.as_ref().map(|node_path| { diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 233c378dae06d3..02e90d60a403b3 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -64,14 +64,14 @@ You can configure which `rust-analyzer` binary Zed should use. 
By default, Zed will try to find a `rust-analyzer` in your `$PATH` and try to use that. If that binary successfully executes `rust-analyzer --help`, it's used. Otherwise, Zed will fall back to installing its own `rust-analyzer` version and using that. -If you want to disable Zed looking for a `rust-analyzer` binary, you can set `path_lookup` to `false` in your `settings.json`: +If you want to disable Zed looking for a `rust-analyzer` binary, you can set `ignore_system_version` to `true` in your `settings.json`: ```json { "lsp": { "rust-analyzer": { "binary": { - "path_lookup": false + "ignore_system_version": true } } } From 1eddd2f38d844f50af4ff0e76ddab63687519d7f Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Wed, 25 Sep 2024 15:21:00 -0400 Subject: [PATCH 331/762] Fix file descriptors leak in evals (#18351) Fixes an issue where evals were hitting "too many open files" errors because we were adding (and detaching) new directory watches for each project. Now we add those watches globally/at the worktree level, and we store the tasks so they stop watching on drop. 
Release Notes: - N/A --------- Co-authored-by: Max Co-authored-by: Piotr Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> --- Cargo.lock | 1 + .../src/platform/mac/attributed_string.rs | 4 +- crates/project/src/project.rs | 10 +--- crates/snippet_provider/Cargo.toml | 1 + crates/snippet_provider/src/lib.rs | 55 +++++++++++++++---- 5 files changed, 49 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 41b2d6d452af53..26b979ccf72aa9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10498,6 +10498,7 @@ dependencies = [ "futures 0.3.30", "gpui", "parking_lot", + "paths", "serde", "serde_json", "snippet", diff --git a/crates/gpui/src/platform/mac/attributed_string.rs b/crates/gpui/src/platform/mac/attributed_string.rs index 663ce67d4cd417..3f1185bc145cf3 100644 --- a/crates/gpui/src/platform/mac/attributed_string.rs +++ b/crates/gpui/src/platform/mac/attributed_string.rs @@ -70,9 +70,7 @@ mod tests { unsafe { let image: id = msg_send![class!(NSImage), alloc]; - image.initWithContentsOfFile_( - NSString::alloc(nil).init_str("/Users/rtfeldman/Downloads/test.jpeg"), - ); + image.initWithContentsOfFile_(NSString::alloc(nil).init_str("test.jpeg")); let _size = image.size(); let string = NSString::alloc(nil).init_str("Test String"); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 8d95c8f2f18238..10fd88f286d994 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -587,10 +587,7 @@ impl Project { cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) .detach(); let tasks = Inventory::new(cx); - let global_snippets_dir = paths::config_dir().join("snippets"); - let snippets = - SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - + let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([]), cx); let worktree_store = cx.new_model(|_| WorktreeStore::local(false, fs.clone())); cx.subscribe(&worktree_store, 
Self::on_worktree_store_event) .detach(); @@ -875,9 +872,8 @@ impl Project { let this = cx.new_model(|cx| { let replica_id = response.payload.replica_id as ReplicaId; let tasks = Inventory::new(cx); - let global_snippets_dir = paths::config_dir().join("snippets"); - let snippets = - SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); + + let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([]), cx); let mut worktrees = Vec::new(); for worktree in response.payload.worktrees { diff --git a/crates/snippet_provider/Cargo.toml b/crates/snippet_provider/Cargo.toml index 75b7210a7afa54..95ab19ebb6f992 100644 --- a/crates/snippet_provider/Cargo.toml +++ b/crates/snippet_provider/Cargo.toml @@ -15,6 +15,7 @@ fs.workspace = true futures.workspace = true gpui.workspace = true parking_lot.workspace = true +paths.workspace = true serde.workspace = true serde_json.workspace = true snippet.workspace = true diff --git a/crates/snippet_provider/src/lib.rs b/crates/snippet_provider/src/lib.rs index 17d615866a4307..a18f9ff1b6f899 100644 --- a/crates/snippet_provider/src/lib.rs +++ b/crates/snippet_provider/src/lib.rs @@ -130,8 +130,29 @@ async fn initial_scan( pub struct SnippetProvider { fs: Arc, snippets: HashMap>>>, + watch_tasks: Vec>>, } +// Watches global snippet directory, is created just once and reused across multiple projects +struct GlobalSnippetWatcher(Model); + +impl GlobalSnippetWatcher { + fn new(fs: Arc, cx: &mut AppContext) -> Self { + let global_snippets_dir = paths::config_dir().join("snippets"); + let provider = cx.new_model(|_cx| SnippetProvider { + fs, + snippets: Default::default(), + watch_tasks: vec![], + }); + provider.update(cx, |this, cx| { + this.watch_directory(&global_snippets_dir, cx) + }); + Self(provider) + } +} + +impl gpui::Global for GlobalSnippetWatcher {} + impl SnippetProvider { pub fn new( fs: Arc, @@ -139,29 +160,29 @@ impl SnippetProvider { cx: &mut AppContext, ) -> Model { cx.new_model(move |cx| 
{ + if !cx.has_global::() { + let global_watcher = GlobalSnippetWatcher::new(fs.clone(), cx); + cx.set_global(global_watcher); + } let mut this = Self { fs, + watch_tasks: Vec::new(), snippets: Default::default(), }; - let mut task_handles = vec![]; for dir in dirs_to_watch { - task_handles.push(this.watch_directory(&dir, cx)); + this.watch_directory(&dir, cx); } - cx.spawn(|_, _| async move { - futures::future::join_all(task_handles).await; - }) - .detach(); this }) } /// Add directory to be watched for content changes - fn watch_directory(&mut self, path: &Path, cx: &mut ModelContext) -> Task> { + fn watch_directory(&mut self, path: &Path, cx: &mut ModelContext) { let path: Arc = Arc::from(path); - cx.spawn(|this, mut cx| async move { + self.watch_tasks.push(cx.spawn(|this, mut cx| async move { let fs = this.update(&mut cx, |this, _| this.fs.clone())?; let watched_path = path.clone(); let watcher = fs.watch(&watched_path, Duration::from_secs(1)); @@ -177,10 +198,10 @@ impl SnippetProvider { .await?; } Ok(()) - }) + })); } - fn lookup_snippets<'a>( + fn lookup_snippets<'a, const LOOKUP_GLOBALS: bool>( &'a self, language: &'a SnippetKind, cx: &AppContext, @@ -193,6 +214,16 @@ impl SnippetProvider { .into_iter() .flat_map(|(_, snippets)| snippets.into_iter()) .collect(); + if LOOKUP_GLOBALS { + if let Some(global_watcher) = cx.try_global::() { + user_snippets.extend( + global_watcher + .0 + .read(cx) + .lookup_snippets::(language, cx), + ); + } + } let Some(registry) = SnippetRegistry::try_global(cx) else { return user_snippets; @@ -205,11 +236,11 @@ impl SnippetProvider { } pub fn snippets_for(&self, language: SnippetKind, cx: &AppContext) -> Vec> { - let mut requested_snippets = self.lookup_snippets(&language, cx); + let mut requested_snippets = self.lookup_snippets::(&language, cx); if language.is_some() { // Look up global snippets as well. 
- requested_snippets.extend(self.lookup_snippets(&None, cx)); + requested_snippets.extend(self.lookup_snippets::(&None, cx)); } requested_snippets } From dc7c49bd0b386e6303472542a44aaeba0c2c0526 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:25:57 -0400 Subject: [PATCH 332/762] Pin actions/stale action to 28ca103 (#18356) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [actions/stale](https://redirect.github.com/actions/stale) | action | pinDigest | -> `28ca103` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. 
--- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 2d4085524bb8d0..bbafb6c9851c4a 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -8,7 +8,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v9 + - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 with: repo-token: ${{ secrets.GITHUB_TOKEN }} stale-issue-message: > From ae6a3d15af5814eaa5602c23314499b73a8329e4 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 25 Sep 2024 12:45:41 -0700 Subject: [PATCH 333/762] Make python run local worktree LSPs (#18353) Release Notes: - Python: made it possible to use locally installed `pyright` if available --------- Co-authored-by: conrad --- crates/language/src/language.rs | 4 +++ crates/languages/src/python.rs | 21 +++++++++++ crates/node_runtime/src/node_runtime.rs | 3 +- crates/project/src/lsp_store.rs | 46 +++++++++++++++++++++++++ 4 files changed, 73 insertions(+), 1 deletion(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 4c75ef4eeb38da..fad799da19898d 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -313,6 +313,10 @@ pub trait LspAdapterDelegate: Send + Sync { fn update_status(&self, language: LanguageServerName, status: LanguageServerBinaryStatus); async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option>; + async fn npm_package_installed_version( + &self, + package_name: &str, + ) -> Result>; async fn which(&self, command: &OsStr) -> Option; async fn shell_env(&self) -> HashMap; async fn read_text_file(&self, path: PathBuf) -> Result; diff --git 
a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 964abf42b525fa..4b5fe3d277cd39 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -20,6 +20,7 @@ use task::{TaskTemplate, TaskTemplates, VariableName}; use util::ResultExt; const SERVER_PATH: &str = "node_modules/pyright/langserver.index.js"; +const NODE_MODULE_RELATIVE_SERVER_PATH: &str = "pyright/langserver.index.js"; fn server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -43,6 +44,26 @@ impl LspAdapter for PythonLspAdapter { Self::SERVER_NAME.clone() } + async fn check_if_user_installed( + &self, + delegate: &dyn LspAdapterDelegate, + _: &AsyncAppContext, + ) -> Option { + let node = delegate.which("node".as_ref()).await?; + let (node_modules_path, _) = delegate + .npm_package_installed_version(Self::SERVER_NAME.as_ref()) + .await + .log_err()??; + + let path = node_modules_path.join(NODE_MODULE_RELATIVE_SERVER_PATH); + + Some(LanguageServerBinary { + path: node, + env: None, + arguments: server_binary_arguments(&path), + }) + } + async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 9507eb75364859..0f0512c65ee714 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -177,6 +177,7 @@ impl NodeRuntime { "5000", ]); + // This is also wrong because the directory is wrong. 
self.run_npm_subcommand(directory, "install", &arguments) .await?; Ok(()) @@ -576,7 +577,7 @@ impl NodeRuntimeTrait for SystemNodeRuntime { } } -async fn read_package_installed_version( +pub async fn read_package_installed_version( node_module_directory: PathBuf, name: &str, ) -> Result> { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 21d5de53e6be28..bef57bafb48cf9 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -48,6 +48,7 @@ use lsp::{ LspRequestFuture, MessageActionItem, MessageType, OneOf, ServerHealthStatus, ServerStatus, SymbolKind, TextEdit, Url, WorkDoneProgressCancelParams, WorkspaceFolder, }; +use node_runtime::read_package_installed_version; use parking_lot::{Mutex, RwLock}; use postage::watch; use rand::prelude::*; @@ -7801,6 +7802,44 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { task.await.unwrap_or_default() } + async fn npm_package_installed_version( + &self, + package_name: &str, + ) -> Result> { + let local_package_directory = self.worktree_root_path(); + let node_modules_directory = local_package_directory.join("node_modules"); + + if let Some(version) = + read_package_installed_version(node_modules_directory.clone(), package_name).await? + { + return Ok(Some((node_modules_directory, version))); + } + let Some(npm) = self.which("npm".as_ref()).await else { + log::warn!( + "Failed to find npm executable for {:?}", + local_package_directory + ); + return Ok(None); + }; + + let env = self.shell_env().await; + let output = smol::process::Command::new(&npm) + .args(["root", "-g"]) + .envs(env) + .current_dir(local_package_directory) + .output() + .await?; + let global_node_modules = + PathBuf::from(String::from_utf8_lossy(&output.stdout).to_string()); + + if let Some(version) = + read_package_installed_version(global_node_modules.clone(), package_name).await? 
+ { + return Ok(Some((global_node_modules, version))); + } + return Ok(None); + } + #[cfg(not(target_os = "windows"))] async fn which(&self, command: &OsStr) -> Option { let worktree_abs_path = self.worktree.abs_path(); @@ -7883,6 +7922,13 @@ impl LspAdapterDelegate for SshLspAdapterDelegate { .ok(); } + async fn npm_package_installed_version( + &self, + _package_name: &str, + ) -> Result> { + Ok(None) + } + fn http_client(&self) -> Arc { Arc::new(BlockedHttpClient) } From 21a023980d39b0113d39331942eb05f365af8bc0 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 25 Sep 2024 12:50:38 -0700 Subject: [PATCH 334/762] Expand git diffs when clicking the gutter strip, display their controls in a block above (#18313) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Todo: * [x] Tooltips for hunk buttons * [x] Buttons to go to next and previous hunk * [x] Ellipsis button that opens a context menu with `Revert all` /cc @iamnbutler @danilo-leal for design 👀 Release Notes: - Changed the behavior of the git gutter so that diff hunk are expanded immediately when clicking the gutter, and hunk controls are displayed above the hunk. 
--------- Co-authored-by: Marshall Co-authored-by: Marshall Bowers --- crates/collab/src/tests/editor_tests.rs | 289 +----------- crates/editor/src/editor.rs | 126 ++--- crates/editor/src/editor_tests.rs | 211 ++++----- crates/editor/src/element.rs | 95 +--- crates/editor/src/hunk_diff.rs | 597 +++++++++++++++--------- 5 files changed, 570 insertions(+), 748 deletions(-) diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 121c93656305a6..d2835edc619e20 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -7,18 +7,12 @@ use collections::HashMap; use editor::{ actions::{ ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename, - RevertSelectedHunks, ToggleCodeActions, Undo, - }, - display_map::DisplayRow, - test::{ - editor_hunks, - editor_test_context::{AssertionContextManager, EditorTestContext}, - expanded_hunks, expanded_hunks_background_highlights, + ToggleCodeActions, Undo, }, + test::editor_test_context::{AssertionContextManager, EditorTestContext}, Editor, }; use futures::StreamExt; -use git::diff::DiffHunkStatus; use gpui::{TestAppContext, UpdateGlobal, VisualContext, VisualTestContext}; use indoc::indoc; use language::{ @@ -1970,285 +1964,6 @@ async fn test_inlay_hint_refresh_is_forwarded( }); } -#[gpui::test] -async fn test_multiple_hunk_types_revert(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { - let mut server = TestServer::start(cx_a.executor()).await; - let client_a = server.create_client(cx_a, "user_a").await; - let client_b = server.create_client(cx_b, "user_b").await; - server - .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) - .await; - let active_call_a = cx_a.read(ActiveCall::global); - let active_call_b = cx_b.read(ActiveCall::global); - - cx_a.update(editor::init); - cx_b.update(editor::init); - - client_a.language_registry().add(rust_lang()); - client_b.language_registry().add(rust_lang()); - - let 
base_text = indoc! {r#"struct Row; -struct Row1; -struct Row2; - -struct Row4; -struct Row5; -struct Row6; - -struct Row8; -struct Row9; -struct Row10;"#}; - - client_a - .fs() - .insert_tree( - "/a", - json!({ - "main.rs": base_text, - }), - ) - .await; - let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; - active_call_a - .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx)) - .await - .unwrap(); - let project_id = active_call_a - .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) - .await - .unwrap(); - - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; - active_call_b - .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) - .await - .unwrap(); - - let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); - let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); - - let editor_a = workspace_a - .update(cx_a, |workspace, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, cx) - }) - .await - .unwrap() - .downcast::() - .unwrap(); - - let editor_b = workspace_b - .update(cx_b, |workspace, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, cx) - }) - .await - .unwrap() - .downcast::() - .unwrap(); - - let mut editor_cx_a = EditorTestContext { - cx: cx_a.clone(), - window: cx_a.handle(), - editor: editor_a, - assertion_cx: AssertionContextManager::new(), - }; - let mut editor_cx_b = EditorTestContext { - cx: cx_b.clone(), - window: cx_b.handle(), - editor: editor_b, - assertion_cx: AssertionContextManager::new(), - }; - - // host edits the file, that differs from the base text, producing diff hunks - editor_cx_a.set_state(indoc! 
{r#"struct Row; - struct Row0.1; - struct Row0.2; - struct Row1; - - struct Row4; - struct Row5444; - struct Row6; - - struct Row9; - struct Row1220;ˇ"#}); - editor_cx_a.update_editor(|editor, cx| { - editor - .buffer() - .read(cx) - .as_singleton() - .unwrap() - .update(cx, |buffer, cx| { - buffer.set_diff_base(Some(base_text.into()), cx); - }); - }); - editor_cx_b.update_editor(|editor, cx| { - editor - .buffer() - .read(cx) - .as_singleton() - .unwrap() - .update(cx, |buffer, cx| { - buffer.set_diff_base(Some(base_text.into()), cx); - }); - }); - cx_a.executor().run_until_parked(); - cx_b.executor().run_until_parked(); - - // the client selects a range in the updated buffer, expands it to see the diff for each hunk in the selection - // the host does not see the diffs toggled - editor_cx_b.set_selections_state(indoc! {r#"«ˇstruct Row; - struct Row0.1; - struct Row0.2; - struct Row1; - - struct Row4; - struct Row5444; - struct Row6; - - struct R»ow9; - struct Row1220;"#}); - editor_cx_b - .update_editor(|editor, cx| editor.toggle_hunk_diff(&editor::actions::ToggleHunkDiff, cx)); - cx_a.executor().run_until_parked(); - cx_b.executor().run_until_parked(); - editor_cx_a.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![ - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(1)..DisplayRow(3) - ), - ( - "struct Row2;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(4)..DisplayRow(4) - ), - ( - "struct Row5;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) - ), - ( - "struct Row8;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(9)..DisplayRow(9) - ), - ( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(10)..DisplayRow(10), - ), - ] - ); - 
assert_eq!(all_expanded_hunks, Vec::new()); - }); - editor_cx_b.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(1)..=DisplayRow(2), DisplayRow(8)..=DisplayRow(8)], - ); - assert_eq!( - all_hunks, - vec![ - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(1)..DisplayRow(3) - ), - ( - "struct Row2;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) - ), - ( - "struct Row5;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(8)..DisplayRow(9) - ), - ( - "struct Row8;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(12)..DisplayRow(12) - ), - ( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(13)..DisplayRow(13), - ), - ] - ); - assert_eq!(all_expanded_hunks, &all_hunks[..all_hunks.len() - 1]); - }); - - // the client reverts the hunks, removing the expanded diffs too - // both host and the client observe the reverted state (with one hunk left, not covered by client's selection) - editor_cx_b.update_editor(|editor, cx| { - editor.revert_selected_hunks(&RevertSelectedHunks, cx); - }); - cx_a.executor().run_until_parked(); - cx_b.executor().run_until_parked(); - editor_cx_a.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(10)..DisplayRow(10), - )] - ); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - editor_cx_b.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = 
expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![( - "struct Row10;".to_string(), - DiffHunkStatus::Modified, - DisplayRow(10)..DisplayRow(10), - )] - ); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - editor_cx_a.assert_editor_state(indoc! {r#"struct Row; - struct Row1; - struct Row2; - - struct Row4; - struct Row5; - struct Row6; - - struct Row8; - struct Row9; - struct Row1220;ˇ"#}); - editor_cx_b.assert_editor_state(indoc! {r#"«ˇstruct Row; - struct Row1; - struct Row2; - - struct Row4; - struct Row5; - struct Row6; - - struct Row8; - struct R»ow9; - struct Row1220;"#}); -} - #[gpui::test(iterations = 10)] async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let mut server = TestServer::start(cx_a.executor()).await; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ad5cd24d73ac46..78c8ba6920337c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -154,7 +154,7 @@ use theme::{ }; use ui::{ h_flex, prelude::*, ButtonSize, ButtonStyle, Disclosure, IconButton, IconName, IconSize, - ListItem, Popover, Tooltip, + ListItem, Popover, PopoverMenuHandle, Tooltip, }; use util::{defer, maybe, post_inc, RangeExt, ResultExt, TryFutureExt}; use workspace::item::{ItemHandle, PreviewTabsSettings}; @@ -562,6 +562,7 @@ pub struct Editor { nav_history: Option, context_menu: RwLock>, mouse_context_menu: Option, + hunk_controls_menu_handle: PopoverMenuHandle, completion_tasks: Vec<(CompletionId, Task>)>, signature_help_state: SignatureHelpState, auto_signature_help: Option, @@ -1938,6 +1939,7 @@ impl Editor { nav_history: None, context_menu: RwLock::new(None), mouse_context_menu: None, + hunk_controls_menu_handle: PopoverMenuHandle::default(), completion_tasks: Default::default(), signature_help_state: SignatureHelpState::default(), auto_signature_help: None, @@ -5383,23 +5385,6 @@ impl 
Editor { })) } - fn close_hunk_diff_button( - &self, - hunk: HoveredHunk, - row: DisplayRow, - cx: &mut ViewContext, - ) -> IconButton { - IconButton::new( - ("close_hunk_diff_indicator", row.0 as usize), - ui::IconName::Close, - ) - .shape(ui::IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .tooltip(|cx| Tooltip::for_action("Close hunk diff", &ToggleHunkDiff, cx)) - .on_click(cx.listener(move |editor, _e, cx| editor.toggle_hovered_hunk(&hunk, cx))) - } - pub fn context_menu_visible(&self) -> bool { self.context_menu .read() @@ -9335,32 +9320,42 @@ impl Editor { } } - fn go_to_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext) { + fn go_to_next_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext) { let snapshot = self .display_map .update(cx, |display_map, cx| display_map.snapshot(cx)); let selection = self.selections.newest::(cx); + self.go_to_hunk_after_position(&snapshot, selection.head(), cx); + } - if !self.seek_in_direction( - &snapshot, - selection.head(), + fn go_to_hunk_after_position( + &mut self, + snapshot: &DisplaySnapshot, + position: Point, + cx: &mut ViewContext<'_, Editor>, + ) -> Option { + if let Some(hunk) = self.go_to_next_hunk_in_direction( + snapshot, + position, false, - snapshot.buffer_snapshot.git_diff_hunks_in_range( - MultiBufferRow(selection.head().row + 1)..MultiBufferRow::MAX, - ), + snapshot + .buffer_snapshot + .git_diff_hunks_in_range(MultiBufferRow(position.row + 1)..MultiBufferRow::MAX), cx, ) { - let wrapped_point = Point::zero(); - self.seek_in_direction( - &snapshot, - wrapped_point, - true, - snapshot.buffer_snapshot.git_diff_hunks_in_range( - MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX, - ), - cx, - ); + return Some(hunk); } + + let wrapped_point = Point::zero(); + self.go_to_next_hunk_in_direction( + snapshot, + wrapped_point, + true, + snapshot.buffer_snapshot.git_diff_hunks_in_range( + MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX, + ), + cx, + ) } fn 
go_to_prev_hunk(&mut self, _: &GoToPrevHunk, cx: &mut ViewContext) { @@ -9369,52 +9364,65 @@ impl Editor { .update(cx, |display_map, cx| display_map.snapshot(cx)); let selection = self.selections.newest::(cx); - if !self.seek_in_direction( - &snapshot, - selection.head(), + self.go_to_hunk_before_position(&snapshot, selection.head(), cx); + } + + fn go_to_hunk_before_position( + &mut self, + snapshot: &DisplaySnapshot, + position: Point, + cx: &mut ViewContext<'_, Editor>, + ) -> Option { + if let Some(hunk) = self.go_to_next_hunk_in_direction( + snapshot, + position, false, - snapshot.buffer_snapshot.git_diff_hunks_in_range_rev( - MultiBufferRow(0)..MultiBufferRow(selection.head().row), - ), + snapshot + .buffer_snapshot + .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(position.row)), cx, ) { - let wrapped_point = snapshot.buffer_snapshot.max_point(); - self.seek_in_direction( - &snapshot, - wrapped_point, - true, - snapshot.buffer_snapshot.git_diff_hunks_in_range_rev( - MultiBufferRow(0)..MultiBufferRow(wrapped_point.row), - ), - cx, - ); + return Some(hunk); } + + let wrapped_point = snapshot.buffer_snapshot.max_point(); + self.go_to_next_hunk_in_direction( + snapshot, + wrapped_point, + true, + snapshot + .buffer_snapshot + .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(wrapped_point.row)), + cx, + ) } - fn seek_in_direction( + fn go_to_next_hunk_in_direction( &mut self, snapshot: &DisplaySnapshot, initial_point: Point, is_wrapped: bool, hunks: impl Iterator, cx: &mut ViewContext, - ) -> bool { + ) -> Option { let display_point = initial_point.to_display_point(snapshot); let mut hunks = hunks - .map(|hunk| diff_hunk_to_display(&hunk, snapshot)) - .filter(|hunk| is_wrapped || !hunk.contains_display_row(display_point.row())) + .map(|hunk| (diff_hunk_to_display(&hunk, snapshot), hunk)) + .filter(|(display_hunk, _)| { + is_wrapped || !display_hunk.contains_display_row(display_point.row()) + }) .dedup(); - if let Some(hunk) = 
hunks.next() { + if let Some((display_hunk, hunk)) = hunks.next() { self.change_selections(Some(Autoscroll::fit()), cx, |s| { - let row = hunk.start_display_row(); + let row = display_hunk.start_display_row(); let point = DisplayPoint::new(row, 0); s.select_display_ranges([point..point]); }); - true + Some(hunk) } else { - false + None } } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 5927c22cb08439..de1b12abe00778 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -9623,7 +9623,7 @@ async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) cx.update_editor(|editor, cx| { //Wrap around the bottom of the buffer for _ in 0..3 { - editor.go_to_hunk(&GoToHunk, cx); + editor.go_to_next_hunk(&GoToHunk, cx); } }); @@ -9709,7 +9709,7 @@ async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) //Make sure that the fold only gets one hunk for _ in 0..4 { - editor.go_to_hunk(&GoToHunk, cx); + editor.go_to_next_hunk(&GoToHunk, cx); } }); @@ -11226,7 +11226,7 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test cx.update_editor(|editor, cx| { for _ in 0..4 { - editor.go_to_hunk(&GoToHunk, cx); + editor.go_to_next_hunk(&GoToHunk, cx); editor.toggle_hunk_diff(&ToggleHunkDiff, cx); } }); @@ -11249,18 +11249,13 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(1)..=DisplayRow(1), DisplayRow(7)..=DisplayRow(7), DisplayRow(9)..=DisplayRow(9)], - "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" - ); assert_eq!( all_hunks, vec![ - ("use some::mod;\n".to_string(), DiffHunkStatus::Modified, 
DisplayRow(1)..DisplayRow(2)), - ("const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(4)..DisplayRow(4)), - (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(7)..DisplayRow(8)), - ("".to_string(), DiffHunkStatus::Added, DisplayRow(9)..DisplayRow(10)), + ("use some::mod;\n".to_string(), DiffHunkStatus::Modified, DisplayRow(2)..DisplayRow(3)), + ("const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(6)..DisplayRow(6)), + (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(10)..DisplayRow(11)), + ("".to_string(), DiffHunkStatus::Added, DisplayRow(13)..DisplayRow(14)), ], "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ (from modified and removed hunks)" @@ -11269,6 +11264,11 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test all_hunks, all_expanded_hunks, "Editor hunks should not change and all be expanded" ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(2)..=DisplayRow(2), DisplayRow(10)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(13)], + "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" + ); }); cx.update_editor(|editor, cx| { @@ -11311,7 +11311,7 @@ async fn test_toggled_diff_base_change( const B: u32 = 42; const C: u32 = 42; - fn main(ˇ) { + fn main() { println!("hello"); println!("world"); @@ -11356,9 +11356,9 @@ async fn test_toggled_diff_base_change( DisplayRow(3)..DisplayRow(3) ), ( - "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(5)..DisplayRow(7) + DisplayRow(6)..DisplayRow(7) ), ( "".to_string(), @@ -11390,22 +11390,18 @@ async fn test_toggled_diff_base_change( "# .unindent(), ); + cx.update_editor(|editor, cx| { let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, 
&snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(9)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(14)], - "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" - ); assert_eq!( all_hunks, vec![ - ("use some::mod1;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(1)..DisplayRow(1)), - ("const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(5)..DisplayRow(5)), - ("fn main(ˇ) {\n println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(9)..DisplayRow(11)), - ("".to_string(), DiffHunkStatus::Added, DisplayRow(13)..DisplayRow(15)), + ("use some::mod1;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(2)..DisplayRow(2)), + ("const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(7)..DisplayRow(7)), + (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(12)..DisplayRow(13)), + ("".to_string(), DiffHunkStatus::Added, DisplayRow(16)..DisplayRow(18)), ], "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ (from modified and removed hunks)" @@ -11414,6 +11410,11 @@ async fn test_toggled_diff_base_change( all_hunks, all_expanded_hunks, "Editor hunks should not change and all be expanded" ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(12)..=DisplayRow(12), DisplayRow(16)..=DisplayRow(17)], + "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" + ); }); cx.set_diff_base(Some("new diff base!")); @@ -11459,7 +11460,7 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test const B: u32 = 42; const C: u32 = 42; - fn main(ˇ) { + fn main() { println!("hello"); println!("world"); @@ -11520,9 +11521,9 @@ async fn test_fold_unfold_diff(executor: 
BackgroundExecutor, cx: &mut gpui::Test DisplayRow(3)..DisplayRow(3) ), ( - "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(5)..DisplayRow(7) + DisplayRow(6)..DisplayRow(7) ), ( "".to_string(), @@ -11576,50 +11577,50 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(9)..=DisplayRow(10), - DisplayRow(13)..=DisplayRow(14), - DisplayRow(19)..=DisplayRow(19) - ] - ); assert_eq!( all_hunks, vec![ ( "use some::mod1;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(1)..DisplayRow(1) + DisplayRow(2)..DisplayRow(2) ), ( "const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) + DisplayRow(7)..DisplayRow(7) ), ( - "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(9)..DisplayRow(11) + DisplayRow(12)..DisplayRow(13) ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(13)..DisplayRow(15) + DisplayRow(16)..DisplayRow(18) ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(19)..DisplayRow(20) + DisplayRow(23)..DisplayRow(24) ), ( "fn another2() {\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(23)..DisplayRow(23) + DisplayRow(28)..DisplayRow(28) ), ], ); assert_eq!(all_hunks, all_expanded_hunks); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![ + DisplayRow(12)..=DisplayRow(12), + DisplayRow(16)..=DisplayRow(17), + DisplayRow(23)..=DisplayRow(23) + ] + ); }); cx.update_editor(|editor, cx| editor.fold_selected_ranges(&FoldSelectedRanges, cx)); @@ -11653,11 +11654,6 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test let snapshot = 
editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(5)..=DisplayRow(5)], - "Only one hunk is left not folded, its highlight should be visible" - ); assert_eq!( all_hunks, vec![ @@ -11672,7 +11668,7 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test DisplayRow(0)..DisplayRow(0) ), ( - "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(0)..DisplayRow(0) ), @@ -11684,12 +11680,12 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(6) + DisplayRow(6)..DisplayRow(7) ), ( "fn another2() {\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(9)..DisplayRow(9) + DisplayRow(11)..DisplayRow(11) ), ], "Hunk list should still return shifted folded hunks" @@ -11700,16 +11696,21 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(6) + DisplayRow(6)..DisplayRow(7) ), ( "fn another2() {\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(9)..DisplayRow(9) + DisplayRow(11)..DisplayRow(11) ), ], "Only non-folded hunks should be left expanded" ); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(6)..=DisplayRow(6)], + "Only one hunk is left not folded, its highlight should be visible" + ); }); cx.update_editor(|editor, cx| { @@ -11746,51 +11747,51 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test let snapshot = editor.snapshot(cx); let all_hunks = editor_hunks(editor, &snapshot, cx); let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - 
expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(9)..=DisplayRow(10), - DisplayRow(13)..=DisplayRow(14), - DisplayRow(19)..=DisplayRow(19) - ], - "After unfolding, all hunk diffs should be visible again" - ); assert_eq!( all_hunks, vec![ ( "use some::mod1;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(1)..DisplayRow(1) + DisplayRow(2)..DisplayRow(2) ), ( "const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) + DisplayRow(7)..DisplayRow(7) ), ( - "fn main(ˇ) {\n println!(\"hello\");\n".to_string(), + " println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(9)..DisplayRow(11) + DisplayRow(12)..DisplayRow(13) ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(13)..DisplayRow(15) + DisplayRow(16)..DisplayRow(18) ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(19)..DisplayRow(20) + DisplayRow(23)..DisplayRow(24) ), ( "fn another2() {\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(23)..DisplayRow(23) + DisplayRow(28)..DisplayRow(28) ), ], ); assert_eq!(all_hunks, all_expanded_hunks); + assert_eq!( + expanded_hunks_background_highlights(editor, cx), + vec![ + DisplayRow(12)..=DisplayRow(12), + DisplayRow(16)..=DisplayRow(17), + DisplayRow(23)..=DisplayRow(23) + ], + "After unfolding, all hunk diffs should be visible again" + ); }); } @@ -11940,17 +11941,17 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) ( "bbbb\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5), + DisplayRow(6)..DisplayRow(6), ), ( "nnnn\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(23)..DisplayRow(24), + DisplayRow(25)..DisplayRow(26), ), ( "".to_string(), DiffHunkStatus::Added, - DisplayRow(43)..DisplayRow(44), + DisplayRow(46)..DisplayRow(47), ), ]; @@ -11975,8 +11976,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) assert_eq!( expanded_hunks_background_highlights(editor, cx), vec![ - 
DisplayRow(23)..=DisplayRow(23), - DisplayRow(43)..=DisplayRow(43) + DisplayRow(25)..=DisplayRow(25), + DisplayRow(46)..=DisplayRow(46) ], ); assert_eq!(all_hunks, expected_all_hunks_shifted); @@ -12007,8 +12008,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) assert_eq!( expanded_hunks_background_highlights(editor, cx), vec![ - DisplayRow(23)..=DisplayRow(23), - DisplayRow(43)..=DisplayRow(43) + DisplayRow(25)..=DisplayRow(25), + DisplayRow(46)..=DisplayRow(46) ], ); assert_eq!(all_hunks, expected_all_hunks_shifted); @@ -12116,12 +12117,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(7) + DisplayRow(5)..DisplayRow(8) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(4)..=DisplayRow(6)] + vec![DisplayRow(5)..=DisplayRow(7)] ); assert_eq!(all_hunks, all_expanded_hunks); }); @@ -12156,12 +12157,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(8) + DisplayRow(5)..DisplayRow(9) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(4)..=DisplayRow(6)], + vec![DisplayRow(5)..=DisplayRow(7)], "Edited hunk should have one more line added" ); assert_eq!( @@ -12201,12 +12202,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(9) + DisplayRow(5)..DisplayRow(10) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(4)..=DisplayRow(6)], + vec![DisplayRow(5)..=DisplayRow(7)], "Edited hunk should have one more line added" ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12245,12 +12246,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(8) + DisplayRow(5)..DisplayRow(9) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - 
vec![DisplayRow(4)..=DisplayRow(6)], + vec![DisplayRow(5)..=DisplayRow(7)], "Deleting a line should shrint the hunk" ); assert_eq!( @@ -12293,12 +12294,12 @@ async fn test_edits_around_toggled_additions( vec![( "".to_string(), DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(6) + DisplayRow(6)..DisplayRow(7) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(5)] + vec![DisplayRow(6)..=DisplayRow(6)] ); assert_eq!(all_hunks, all_expanded_hunks); }); @@ -12335,7 +12336,7 @@ async fn test_edits_around_toggled_additions( ( "const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2) + DisplayRow(3)..DisplayRow(3) ) ] ); @@ -12349,7 +12350,7 @@ async fn test_edits_around_toggled_additions( vec![( "const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2) + DisplayRow(3)..DisplayRow(3) )], "Should open hunks that were adjacent to the stale addition one" ); @@ -12445,7 +12446,7 @@ async fn test_edits_around_toggled_deletions( vec![( "const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(4)..DisplayRow(4) + DisplayRow(5)..DisplayRow(5) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12485,7 +12486,7 @@ async fn test_edits_around_toggled_deletions( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) + DisplayRow(6)..DisplayRow(6) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12520,7 +12521,7 @@ async fn test_edits_around_toggled_deletions( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, - DisplayRow(6)..DisplayRow(6) + DisplayRow(7)..DisplayRow(7) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12554,12 +12555,12 @@ async fn test_edits_around_toggled_deletions( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n\n".to_string(), DiffHunkStatus::Modified, - 
DisplayRow(7)..DisplayRow(8) + DisplayRow(8)..DisplayRow(9) )] ); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(7)], + vec![DisplayRow(8)..=DisplayRow(8)], "Modified expanded hunks should display additions and highlight their background" ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12653,14 +12654,14 @@ async fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(6)], + vec![DisplayRow(7)..=DisplayRow(7)], ); assert_eq!( all_hunks, vec![( "const C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) + DisplayRow(7)..DisplayRow(8) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12696,7 +12697,7 @@ async fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(6)], + vec![DisplayRow(7)..=DisplayRow(7)], "Modified hunk should grow highlighted lines on more text additions" ); assert_eq!( @@ -12704,7 +12705,7 @@ async fn test_edits_around_toggled_modifications( vec![( "const C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(9) + DisplayRow(7)..DisplayRow(10) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12742,14 +12743,14 @@ async fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(8)], + vec![DisplayRow(7)..=DisplayRow(9)], ); assert_eq!( all_hunks, vec![( "const B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(9) + DisplayRow(7)..DisplayRow(10) )], "Modified hunk should grow deleted lines on text deletions above" ); @@ -12786,7 +12787,7 @@ async 
fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(9)], + vec![DisplayRow(7)..=DisplayRow(10)], "Modified hunk should grow deleted lines on text modifications above" ); assert_eq!( @@ -12794,7 +12795,7 @@ async fn test_edits_around_toggled_modifications( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(10) + DisplayRow(7)..DisplayRow(11) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12830,7 +12831,7 @@ async fn test_edits_around_toggled_modifications( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(8)], + vec![DisplayRow(7)..=DisplayRow(9)], "Modified hunk should grow shrink lines on modification lines removal" ); assert_eq!( @@ -12838,7 +12839,7 @@ async fn test_edits_around_toggled_modifications( vec![( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(9) + DisplayRow(7)..DisplayRow(10) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12880,7 +12881,7 @@ async fn test_edits_around_toggled_modifications( "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\nconst D: u32 = 42;\n" .to_string(), DiffHunkStatus::Removed, - DisplayRow(7)..DisplayRow(7) + DisplayRow(8)..DisplayRow(8) )] ); assert_eq!(all_hunks, all_expanded_hunks); @@ -12974,14 +12975,14 @@ async fn test_multiple_expanded_hunks_merge( let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); assert_eq!( expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(6)], + vec![DisplayRow(7)..=DisplayRow(7)], ); assert_eq!( all_hunks, vec![( "const C: u32 = 42;\n".to_string(), DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) + 
DisplayRow(7)..DisplayRow(8) )] ); assert_eq!(all_hunks, all_expanded_hunks); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index cf8edb67dccbc6..9fe05bc4f26063 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -11,7 +11,7 @@ use crate::{ hover_popover::{ self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT, }, - hunk_diff::{diff_hunk_to_display, DisplayDiffHunk, ExpandedHunk}, + hunk_diff::{diff_hunk_to_display, DisplayDiffHunk}, hunk_status, items::BufferSearchHighlights, mouse_context_menu::{self, MenuPosition, MouseContextMenu}, @@ -20,8 +20,8 @@ use crate::{ DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, HoveredHunk, LineDown, LineUp, OpenExcerpts, PageDown, - PageUp, Point, RangeToAnchorExt, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, - ToPoint, CURSORS_VISIBLE_FOR, MAX_LINE_LEN, + PageUp, Point, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, ToPoint, + CURSORS_VISIBLE_FOR, MAX_LINE_LEN, }; use client::ParticipantIndex; use collections::{BTreeMap, HashMap}; @@ -302,7 +302,7 @@ impl EditorElement { } register_action(view, cx, Editor::go_to_diagnostic); register_action(view, cx, Editor::go_to_prev_diagnostic); - register_action(view, cx, Editor::go_to_hunk); + register_action(view, cx, Editor::go_to_next_hunk); register_action(view, cx, Editor::go_to_prev_hunk); register_action(view, cx, |editor, a, cx| { editor.go_to_definition(a, cx).detach_and_log_err(cx); @@ -489,28 +489,7 @@ impl EditorElement { let mut modifiers = event.modifiers; if let Some(hovered_hunk) = hovered_hunk { - if modifiers.control || modifiers.platform { - editor.toggle_hovered_hunk(&hovered_hunk, cx); - } else { - let display_range = hovered_hunk - .multi_buffer_range - .clone() - 
.to_display_points(&position_map.snapshot); - let hunk_bounds = Self::diff_hunk_bounds( - &position_map.snapshot, - position_map.line_height, - gutter_hitbox.bounds, - &DisplayDiffHunk::Unfolded { - diff_base_byte_range: hovered_hunk.diff_base_byte_range.clone(), - display_row_range: display_range.start.row()..display_range.end.row(), - multi_buffer_range: hovered_hunk.multi_buffer_range.clone(), - status: hovered_hunk.status, - }, - ); - if hunk_bounds.contains(&event.position) { - editor.open_hunk_context_menu(hovered_hunk, event.position, cx); - } - } + editor.toggle_hovered_hunk(&hovered_hunk, cx); cx.notify(); return; } else if gutter_hitbox.is_hovered(cx) { @@ -1303,13 +1282,13 @@ impl EditorElement { let display_hunks = buffer_snapshot .git_diff_hunks_in_range(buffer_start_row..buffer_end_row) .filter_map(|hunk| { - let mut display_hunk = diff_hunk_to_display(&hunk, snapshot); + let display_hunk = diff_hunk_to_display(&hunk, snapshot); if let DisplayDiffHunk::Unfolded { multi_buffer_range, status, .. 
- } = &mut display_hunk + } = &display_hunk { let mut is_expanded = false; while let Some(expanded_hunk) = expanded_hunks.peek() { @@ -1332,11 +1311,7 @@ impl EditorElement { } match status { DiffHunkStatus::Added => {} - DiffHunkStatus::Modified => { - if is_expanded { - *status = DiffHunkStatus::Added; - } - } + DiffHunkStatus::Modified => {} DiffHunkStatus::Removed => { if is_expanded { return None; @@ -3371,9 +3346,6 @@ impl EditorElement { for test_indicator in layout.test_indicators.iter_mut() { test_indicator.paint(cx); } - for close_indicator in layout.close_indicators.iter_mut() { - close_indicator.paint(cx); - } if let Some(indicator) = layout.code_actions_indicator.as_mut() { indicator.paint(cx); @@ -4159,46 +4131,6 @@ impl EditorElement { + 1; self.column_pixels(digit_count, cx) } - - #[allow(clippy::too_many_arguments)] - fn layout_hunk_diff_close_indicators( - &self, - line_height: Pixels, - scroll_pixel_position: gpui::Point, - gutter_dimensions: &GutterDimensions, - gutter_hitbox: &Hitbox, - rows_with_hunk_bounds: &HashMap>, - expanded_hunks_by_rows: HashMap, - cx: &mut WindowContext, - ) -> Vec { - self.editor.update(cx, |editor, cx| { - expanded_hunks_by_rows - .into_iter() - .map(|(display_row, hunk)| { - let button = editor.close_hunk_diff_button( - HoveredHunk { - multi_buffer_range: hunk.hunk_range, - status: hunk.status, - diff_base_byte_range: hunk.diff_base_byte_range, - }, - display_row, - cx, - ); - - prepaint_gutter_button( - button, - display_row, - line_height, - gutter_dimensions, - scroll_pixel_position, - gutter_hitbox, - rows_with_hunk_bounds, - cx, - ) - }) - .collect() - }) - } } #[allow(clippy::too_many_arguments)] @@ -5549,15 +5481,6 @@ impl Element for EditorElement { } else { Vec::new() }; - let close_indicators = self.layout_hunk_diff_close_indicators( - line_height, - scroll_pixel_position, - &gutter_dimensions, - &gutter_hitbox, - &rows_with_hunk_bounds, - expanded_add_hunks_by_rows, - cx, - ); self.layout_signature_help( 
&hitbox, @@ -5670,7 +5593,6 @@ impl Element for EditorElement { selections, mouse_context_menu, test_indicators, - close_indicators, code_actions_indicator, gutter_fold_toggles, crease_trailers, @@ -5812,7 +5734,6 @@ pub struct EditorLayout { selections: Vec<(PlayerColor, Vec)>, code_actions_indicator: Option, test_indicators: Vec, - close_indicators: Vec, gutter_fold_toggles: Vec>, crease_trailers: Vec>, mouse_context_menu: Option, diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 67e8a25df58c56..4fa1f10a8a17c9 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -1,28 +1,26 @@ use collections::{hash_map, HashMap, HashSet}; use git::diff::DiffHunkStatus; -use gpui::{Action, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task, View}; +use gpui::{Action, AnchorCorner, AppContext, CursorStyle, Hsla, Model, MouseButton, Task, View}; use language::{Buffer, BufferId, Point}; use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, MultiBufferSnapshot, ToPoint, }; -use settings::SettingsStore; use std::{ ops::{Range, RangeInclusive}, sync::Arc, }; use ui::{ - prelude::*, ActiveTheme, ContextMenu, InteractiveElement, IntoElement, ParentElement, Pixels, - Styled, ViewContext, VisualContext, + prelude::*, ActiveTheme, ContextMenu, IconButtonShape, InteractiveElement, IntoElement, + ParentElement, PopoverMenu, Styled, Tooltip, ViewContext, VisualContext, }; -use util::{debug_panic, RangeExt}; +use util::RangeExt; use crate::{ - editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, - mouse_context_menu::MouseContextMenu, BlockDisposition, BlockProperties, BlockStyle, - CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, Editor, EditorElement, - EditorSnapshot, ExpandAllHunkDiffs, RangeToAnchorExt, RevertFile, RevertSelectedHunks, - ToDisplayPoint, ToggleHunkDiff, + editor_settings::CurrentLineHighlight, hunk_status, 
hunks_for_selections, BlockDisposition, + BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, + Editor, EditorElement, EditorSnapshot, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, + RangeToAnchorExt, RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; #[derive(Debug, Clone)] @@ -41,7 +39,7 @@ pub(super) struct ExpandedHunks { #[derive(Debug, Clone)] pub(super) struct ExpandedHunk { - pub block: Option, + pub blocks: Vec, pub hunk_range: Range, pub diff_base_byte_range: Range, pub status: DiffHunkStatus, @@ -77,85 +75,6 @@ impl ExpandedHunks { } impl Editor { - pub(super) fn open_hunk_context_menu( - &mut self, - hovered_hunk: HoveredHunk, - clicked_point: gpui::Point, - cx: &mut ViewContext, - ) { - let focus_handle = self.focus_handle.clone(); - let expanded = self - .expanded_hunks - .hunks(false) - .any(|expanded_hunk| expanded_hunk.hunk_range == hovered_hunk.multi_buffer_range); - let editor_handle = cx.view().clone(); - let editor_snapshot = self.snapshot(cx); - let start_point = self - .to_pixel_point(hovered_hunk.multi_buffer_range.start, &editor_snapshot, cx) - .unwrap_or(clicked_point); - let end_point = self - .to_pixel_point(hovered_hunk.multi_buffer_range.start, &editor_snapshot, cx) - .unwrap_or(clicked_point); - let norm = - |a: gpui::Point, b: gpui::Point| (a.x - b.x).abs() + (a.y - b.y).abs(); - let closest_source = if norm(start_point, clicked_point) < norm(end_point, clicked_point) { - hovered_hunk.multi_buffer_range.start - } else { - hovered_hunk.multi_buffer_range.end - }; - - self.mouse_context_menu = MouseContextMenu::pinned_to_editor( - self, - closest_source, - clicked_point, - ContextMenu::build(cx, move |menu, _| { - menu.on_blur_subscription(Subscription::new(|| {})) - .context(focus_handle) - .entry( - if expanded { - "Collapse Hunk" - } else { - "Expand Hunk" - }, - Some(ToggleHunkDiff.boxed_clone()), - { - let editor = editor_handle.clone(); - let hunk = hovered_hunk.clone(); - 
move |cx| { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); - }); - } - }, - ) - .entry("Revert Hunk", Some(RevertSelectedHunks.boxed_clone()), { - let editor = editor_handle.clone(); - let hunk = hovered_hunk.clone(); - move |cx| { - let multi_buffer = editor.read(cx).buffer().clone(); - let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx); - let mut revert_changes = HashMap::default(); - if let Some(hunk) = - crate::hunk_diff::to_diff_hunk(&hunk, &multi_buffer_snapshot) - { - Editor::prepare_revert_change( - &mut revert_changes, - &multi_buffer, - &hunk, - cx, - ); - } - if !revert_changes.is_empty() { - editor.update(cx, |editor, cx| editor.revert(revert_changes, cx)); - } - } - }) - .action("Revert File", RevertFile.boxed_clone()) - }), - cx, - ) - } - pub(super) fn toggle_hovered_hunk( &mut self, hovered_hunk: &HoveredHunk, @@ -264,7 +183,8 @@ impl Editor { break; } else if expanded_hunk_row_range == hunk_to_toggle_row_range { highlights_to_remove.push(expanded_hunk.hunk_range.clone()); - blocks_to_remove.extend(expanded_hunk.block); + blocks_to_remove + .extend(expanded_hunk.blocks.iter().copied()); hunks_to_toggle.next(); retain = false; break; @@ -371,9 +291,17 @@ impl Editor { Err(ix) => ix, }; - let block = match hunk.status { + let blocks; + match hunk.status { DiffHunkStatus::Removed => { - self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, hunk, cx) + blocks = self.insert_blocks( + [ + self.hunk_header_block(&hunk, cx), + Self::deleted_text_block(hunk, diff_base_buffer, deleted_text_lines, cx), + ], + None, + cx, + ); } DiffHunkStatus::Added => { self.highlight_rows::( @@ -382,7 +310,7 @@ impl Editor { false, cx, ); - None + blocks = self.insert_blocks([self.hunk_header_block(&hunk, cx)], None, cx); } DiffHunkStatus::Modified => { self.highlight_rows::( @@ -391,13 +319,20 @@ impl Editor { false, cx, ); - self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, hunk, cx) + blocks = 
self.insert_blocks( + [ + self.hunk_header_block(&hunk, cx), + Self::deleted_text_block(hunk, diff_base_buffer, deleted_text_lines, cx), + ], + None, + cx, + ); } }; self.expanded_hunks.hunks.insert( block_insert_index, ExpandedHunk { - block, + blocks, hunk_range: hunk_start..hunk_end, status: hunk.status, folded: false, @@ -408,109 +343,368 @@ impl Editor { Some(()) } - fn insert_deleted_text_block( - &mut self, - diff_base_buffer: Model, - deleted_text_height: u32, + fn hunk_header_block( + &self, hunk: &HoveredHunk, - cx: &mut ViewContext<'_, Self>, - ) -> Option { - let deleted_hunk_color = deleted_hunk_color(cx); - let (editor_height, editor_with_deleted_text) = - editor_with_deleted_text(diff_base_buffer, deleted_hunk_color, hunk, cx); - let editor = cx.view().clone(); - let hunk = hunk.clone(); - let height = editor_height.max(deleted_text_height); - let mut new_block_ids = self.insert_blocks( - Some(BlockProperties { - position: hunk.multi_buffer_range.start, - height, - style: BlockStyle::Flex, - disposition: BlockDisposition::Above, - render: Box::new(move |cx| { - let width = EditorElement::diff_hunk_strip_width(cx.line_height()); - let gutter_dimensions = editor.read(cx.context).gutter_dimensions; - - let close_button = editor.update(cx.context, |editor, cx| { - let editor_snapshot = editor.snapshot(cx); - let hunk_display_range = hunk - .multi_buffer_range - .clone() - .to_display_points(&editor_snapshot); - editor.close_hunk_diff_button( - hunk.clone(), - hunk_display_range.start.row(), - cx, - ) - }); + cx: &mut ViewContext<'_, Editor>, + ) -> BlockProperties { + let border_color = cx.theme().colors().border_disabled; + let gutter_color = match hunk.status { + DiffHunkStatus::Added => cx.theme().status().created, + DiffHunkStatus::Modified => cx.theme().status().modified, + DiffHunkStatus::Removed => cx.theme().status().deleted, + }; + + BlockProperties { + position: hunk.multi_buffer_range.start, + height: 1, + style: BlockStyle::Sticky, + 
disposition: BlockDisposition::Above, + priority: 0, + render: Box::new({ + let editor = cx.view().clone(); + let hunk = hunk.clone(); + move |cx| { + let hunk_controls_menu_handle = + editor.read(cx).hunk_controls_menu_handle.clone(); h_flex() - .id("gutter with editor") - .bg(deleted_hunk_color) - .h(height as f32 * cx.line_height()) + .id(cx.block_id) .w_full() + .h(cx.line_height()) + .child( + div() + .id("gutter-strip") + .w(EditorElement::diff_hunk_strip_width(cx.line_height())) + .h_full() + .bg(gutter_color) + .cursor(CursorStyle::PointingHand) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ) .child( h_flex() - .id("gutter") - .max_w(gutter_dimensions.full_width()) - .min_w(gutter_dimensions.full_width()) .size_full() + .justify_between() + .border_t_1() + .border_color(border_color) .child( h_flex() - .id("gutter hunk") - .bg(cx.theme().status().deleted) - .pl(gutter_dimensions.margin - + gutter_dimensions - .git_blame_entries_width - .unwrap_or_default()) - .max_w(width) - .min_w(width) - .size_full() - .cursor(CursorStyle::PointingHand) - .on_mouse_down(MouseButton::Left, { - let editor = editor.clone(); - let hunk = hunk.clone(); - move |event, cx| { - let modifiers = event.modifiers; - if modifiers.control || modifiers.platform { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); - }); - } else { - editor.update(cx, |editor, cx| { - editor.open_hunk_context_menu( - hunk.clone(), - event.position, + .gap_2() + .pl_6() + .child( + IconButton::new("next-hunk", IconName::ArrowDown) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Next Hunk", + &GoToHunk, + &focus_handle, cx, - ); - }); - } - } - }), + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = 
hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let position = hunk + .multi_buffer_range + .end + .to_point( + &snapshot.buffer_snapshot, + ); + if let Some(hunk) = editor + .go_to_hunk_after_position( + &snapshot, position, cx, + ) + { + let multi_buffer_start = snapshot + .buffer_snapshot + .anchor_before(Point::new( + hunk.row_range.start.0, + 0, + )); + let multi_buffer_end = snapshot + .buffer_snapshot + .anchor_after(Point::new( + hunk.row_range.end.0, + 0, + )); + editor.expand_diff_hunk( + None, + &HoveredHunk { + multi_buffer_range: + multi_buffer_start + ..multi_buffer_end, + status: hunk_status(&hunk), + diff_base_byte_range: hunk + .diff_base_byte_range, + }, + cx, + ); + } + }); + } + }), + ) + .child( + IconButton::new("prev-hunk", IconName::ArrowUp) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Previous Hunk", + &GoToPrevHunk, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let position = hunk + .multi_buffer_range + .start + .to_point( + &snapshot.buffer_snapshot, + ); + let hunk = editor + .go_to_hunk_before_position( + &snapshot, position, cx, + ); + if let Some(hunk) = hunk { + let multi_buffer_start = snapshot + .buffer_snapshot + .anchor_before(Point::new( + hunk.row_range.start.0, + 0, + )); + let multi_buffer_end = snapshot + .buffer_snapshot + .anchor_after(Point::new( + hunk.row_range.end.0, + 0, + )); + editor.expand_diff_hunk( + None, + &HoveredHunk { + multi_buffer_range: + multi_buffer_start + ..multi_buffer_end, + status: hunk_status(&hunk), + diff_base_byte_range: hunk + .diff_base_byte_range, + }, + cx, + ); + } + }); + } + }), + ), ) .child( - v_flex() - .size_full() - .pt(rems(0.25)) - 
.justify_start() - .child(close_button), + h_flex() + .gap_2() + .pr_6() + .child({ + let focus = editor.focus_handle(cx); + PopoverMenu::new("hunk-controls-dropdown") + .trigger( + IconButton::new( + "toggle_editor_selections_icon", + IconName::EllipsisVertical, + ) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .style(ButtonStyle::Subtle) + .selected( + hunk_controls_menu_handle.is_deployed(), + ) + .when( + !hunk_controls_menu_handle.is_deployed(), + |this| { + this.tooltip(|cx| { + Tooltip::text("Hunk Controls", cx) + }) + }, + ), + ) + .anchor(AnchorCorner::TopRight) + .with_handle(hunk_controls_menu_handle) + .menu(move |cx| { + let focus = focus.clone(); + let menu = + ContextMenu::build(cx, move |menu, _| { + menu.context(focus.clone()).action( + "Discard All", + RevertFile.boxed_clone(), + ) + }); + Some(menu) + }) + }) + .child( + IconButton::new("discard", IconName::RotateCcw) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Discard Hunk", + &RevertSelectedHunks, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + let multi_buffer = + editor.read(cx).buffer().clone(); + let multi_buffer_snapshot = + multi_buffer.read(cx).snapshot(cx); + let mut revert_changes = HashMap::default(); + if let Some(hunk) = + crate::hunk_diff::to_diff_hunk( + &hunk, + &multi_buffer_snapshot, + ) + { + Editor::prepare_revert_change( + &mut revert_changes, + &multi_buffer, + &hunk, + cx, + ); + } + if !revert_changes.is_empty() { + editor.update(cx, |editor, cx| { + editor.revert(revert_changes, cx) + }); + } + } + }), + ) + .child( + IconButton::new("collapse", IconName::Close) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Collapse Hunk", + 
&ToggleHunkDiff, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ), ), ) - .child(editor_with_deleted_text.clone()) .into_any_element() - }), - priority: 0, + } + }), + } + } + + fn deleted_text_block( + hunk: &HoveredHunk, + diff_base_buffer: Model, + deleted_text_height: u32, + cx: &mut ViewContext<'_, Editor>, + ) -> BlockProperties { + let gutter_color = match hunk.status { + DiffHunkStatus::Added => unreachable!(), + DiffHunkStatus::Modified => cx.theme().status().modified, + DiffHunkStatus::Removed => cx.theme().status().deleted, + }; + let deleted_hunk_color = deleted_hunk_color(cx); + let (editor_height, editor_with_deleted_text) = + editor_with_deleted_text(diff_base_buffer, deleted_hunk_color, hunk, cx); + let editor = cx.view().clone(); + let hunk = hunk.clone(); + let height = editor_height.max(deleted_text_height); + BlockProperties { + position: hunk.multi_buffer_range.start, + height, + style: BlockStyle::Flex, + disposition: BlockDisposition::Above, + priority: 0, + render: Box::new(move |cx| { + let width = EditorElement::diff_hunk_strip_width(cx.line_height()); + let gutter_dimensions = editor.read(cx.context).gutter_dimensions; + + h_flex() + .id(cx.block_id) + .bg(deleted_hunk_color) + .h(height as f32 * cx.line_height()) + .w_full() + .child( + h_flex() + .id("gutter") + .max_w(gutter_dimensions.full_width()) + .min_w(gutter_dimensions.full_width()) + .size_full() + .child( + h_flex() + .id("gutter hunk") + .bg(gutter_color) + .pl(gutter_dimensions.margin + + gutter_dimensions + .git_blame_entries_width + .unwrap_or_default()) + .max_w(width) + .min_w(width) + .size_full() + .cursor(CursorStyle::PointingHand) + .on_mouse_down(MouseButton::Left, { + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + 
editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ), + ) + .child(editor_with_deleted_text.clone()) + .into_any_element() }), - None, - cx, - ); - if new_block_ids.len() == 1 { - new_block_ids.pop() - } else { - debug_panic!( - "Inserted one editor block but did not receive exactly one block id: {new_block_ids:?}" - ); - None } } @@ -521,7 +715,7 @@ impl Editor { .expanded_hunks .hunks .drain(..) - .filter_map(|expanded_hunk| expanded_hunk.block) + .flat_map(|expanded_hunk| expanded_hunk.blocks.into_iter()) .collect::>(); if to_remove.is_empty() { false @@ -603,7 +797,7 @@ impl Editor { expanded_hunk.folded = true; highlights_to_remove .push(expanded_hunk.hunk_range.clone()); - if let Some(block) = expanded_hunk.block.take() { + for block in expanded_hunk.blocks.drain(..) { blocks_to_remove.insert(block); } break; @@ -650,7 +844,7 @@ impl Editor { } } if !retain { - blocks_to_remove.extend(expanded_hunk.block); + blocks_to_remove.extend(expanded_hunk.blocks.drain(..)); highlights_to_remove.push(expanded_hunk.hunk_range.clone()); } retain @@ -749,7 +943,7 @@ fn added_hunk_color(cx: &AppContext) -> Hsla { } fn deleted_hunk_color(cx: &AppContext) -> Hsla { - let mut deleted_color = cx.theme().status().git().deleted; + let mut deleted_color = cx.theme().status().deleted; deleted_color.fade_out(0.7); deleted_color } @@ -788,32 +982,15 @@ fn editor_with_deleted_text( false, cx, ); - - let subscription_editor = parent_editor.clone(); - editor._subscriptions.extend([ - cx.on_blur(&editor.focus_handle, |editor, cx| { - editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); + editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); + editor + ._subscriptions + .extend([cx.on_blur(&editor.focus_handle, |editor, cx| { editor.change_selections(None, cx, |s| { s.try_cancel(); }); - cx.notify(); - }), - cx.on_focus(&editor.focus_handle, move |editor, cx| { - let restored_highlight = if let Some(parent_editor) = subscription_editor.upgrade() - { - 
parent_editor.read(cx).current_line_highlight - } else { - None - }; - editor.set_current_line_highlight(restored_highlight); - cx.notify(); - }), - cx.observe_global::(|editor, cx| { - if !editor.is_focused(cx) { - editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); - } - }), - ]); + })]); + let parent_editor_for_reverts = parent_editor.clone(); let original_multi_buffer_range = hunk.multi_buffer_range.clone(); let diff_base_range = hunk.diff_base_byte_range.clone(); From 4b4565fb7afaa7ab2c8d2058c924120fda911311 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 25 Sep 2024 22:55:36 +0200 Subject: [PATCH 335/762] assistant: Enable assistant panel/inline assists in ssh remote projects (#18367) Release Notes: - ssh remoting: Enable assistant panel and inline assists (running on client) --- crates/assistant/src/assistant_panel.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 22237eeb079270..c7a06b428b9f1a 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -960,7 +960,8 @@ impl AssistantPanel { } fn new_context(&mut self, cx: &mut ViewContext) -> Option> { - if self.project.read(cx).is_via_collab() { + let project = self.project.read(cx); + if project.is_via_collab() && project.dev_server_project_id().is_none() { let task = self .context_store .update(cx, |store, cx| store.create_remote_context(cx)); From 7398f795e3fde21b8c1a6a40cd67c0b1854ed60c Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Sep 2024 22:01:12 +0000 Subject: [PATCH 336/762] Ollama llama3.2 default context size (#18366) Release Notes: - Ollama: Added llama3.2 support --- crates/ollama/src/ollama.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index e592bfa17717d3..84404afce13b10 100644 --- 
a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -83,7 +83,7 @@ fn get_max_tokens(name: &str) -> usize { "codellama" | "starcoder2" => 16384, "mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "dolphin-mixtral" => 32768, "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder" - | "qwen2.5-coder" => 128000, + | "llama3.2" | "qwen2.5-coder" => 128000, _ => DEFAULT_TOKENS, } .clamp(1, MAXIMUM_TOKENS) From 40408e731e859ecaf03919aaa32f22ff41869522 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Sep 2024 22:01:33 +0000 Subject: [PATCH 337/762] Fix sending alt-enter in terminal (#18363) --- crates/terminal/src/mappings/keys.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/terminal/src/mappings/keys.rs b/crates/terminal/src/mappings/keys.rs index e760db3616fea3..2d4fe4c62e97be 100644 --- a/crates/terminal/src/mappings/keys.rs +++ b/crates/terminal/src/mappings/keys.rs @@ -51,6 +51,7 @@ pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) -> ("escape", AlacModifiers::None) => Some("\x1b".to_string()), ("enter", AlacModifiers::None) => Some("\x0d".to_string()), ("enter", AlacModifiers::Shift) => Some("\x0d".to_string()), + ("enter", AlacModifiers::Alt) => Some("\x1b\x0d".to_string()), ("backspace", AlacModifiers::None) => Some("\x7f".to_string()), //Interesting escape codes ("tab", AlacModifiers::Shift) => Some("\x1b[Z".to_string()), From 64532e94e456dd4595897c96a0d06a7e22379add Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 25 Sep 2024 16:29:04 -0600 Subject: [PATCH 338/762] Move adapters to remote (#18359) Release Notes: - ssh remoting: run LSP Adapters on host --------- Co-authored-by: Mikayla --- Cargo.lock | 1 + crates/languages/Cargo.toml | 54 ++- crates/languages/src/lib.rs | 17 +- crates/project/src/lsp_store.rs | 442 +------------------ crates/project/src/project.rs | 3 +- crates/proto/proto/zed.proto | 74 +--- crates/proto/src/proto.rs | 18 - 
crates/remote_server/Cargo.toml | 1 + crates/remote_server/src/headless_project.rs | 12 +- crates/zed/Cargo.toml | 2 +- 10 files changed, 76 insertions(+), 548 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 26b979ccf72aa9..0b3ee53e9aa852 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9122,6 +9122,7 @@ dependencies = [ "gpui", "http_client", "language", + "languages", "log", "lsp", "node_runtime", diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 33be1a9809601a..5cb5455dd168d6 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -10,6 +10,25 @@ workspace = true [features] test-support = [] +load-grammars = [ + "tree-sitter-bash", + "tree-sitter-c", + "tree-sitter-cpp", + "tree-sitter-css", + "tree-sitter-go", + "tree-sitter-go-mod", + "tree-sitter-gowork", + "tree-sitter-jsdoc", + "tree-sitter-json", + "tree-sitter-md", + "protols-tree-sitter-proto", + "tree-sitter-python", + "tree-sitter-regex", + "tree-sitter-rust", + "tree-sitter-typescript", + "tree-sitter-yaml", + "tree-sitter" +] [dependencies] anyhow.workspace = true @@ -36,25 +55,26 @@ settings.workspace = true smol.workspace = true task.workspace = true toml.workspace = true -tree-sitter-bash.workspace = true -tree-sitter-c.workspace = true -tree-sitter-cpp.workspace = true -tree-sitter-css.workspace = true -tree-sitter-go.workspace = true -tree-sitter-go-mod.workspace = true -tree-sitter-gowork.workspace = true -tree-sitter-jsdoc.workspace = true -tree-sitter-json.workspace = true -tree-sitter-md.workspace = true -protols-tree-sitter-proto.workspace = true -tree-sitter-python.workspace = true -tree-sitter-regex.workspace = true -tree-sitter-rust.workspace = true -tree-sitter-typescript.workspace = true -tree-sitter-yaml.workspace = true -tree-sitter.workspace = true util.workspace = true +tree-sitter-bash = {workspace = true, optional = true} +tree-sitter-c = {workspace = true, optional = true} +tree-sitter-cpp = {workspace = true, optional = true} 
+tree-sitter-css = {workspace = true, optional = true} +tree-sitter-go = {workspace = true, optional = true} +tree-sitter-go-mod = {workspace = true, optional = true} +tree-sitter-gowork = {workspace = true, optional = true} +tree-sitter-jsdoc = {workspace = true, optional = true} +tree-sitter-json = {workspace = true, optional = true} +tree-sitter-md = {workspace = true, optional = true} +protols-tree-sitter-proto = {workspace = true, optional = true} +tree-sitter-python = {workspace = true, optional = true} +tree-sitter-regex = {workspace = true, optional = true} +tree-sitter-rust = {workspace = true, optional = true} +tree-sitter-typescript = {workspace = true, optional = true} +tree-sitter-yaml = {workspace = true, optional = true} +tree-sitter = {workspace = true, optional = true} + [dev-dependencies] text.workspace = true theme = { workspace = true, features = ["test-support"] } diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 7435ddb13196dd..295df6e419b7ec 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -31,6 +31,7 @@ mod yaml; struct LanguageDir; pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mut AppContext) { + #[cfg(feature = "load-grammars")] languages.register_native_grammars([ ("bash", tree_sitter_bash::LANGUAGE), ("c", tree_sitter_c::LANGUAGE), @@ -282,9 +283,21 @@ fn load_config(name: &str) -> LanguageConfig { ) .unwrap(); - ::toml::from_str(&config_toml) + #[allow(unused_mut)] + let mut config: LanguageConfig = ::toml::from_str(&config_toml) .with_context(|| format!("failed to load config.toml for language {name:?}")) - .unwrap() + .unwrap(); + + #[cfg(not(feature = "load-grammars"))] + { + config = LanguageConfig { + name: config.name, + matcher: config.matcher, + ..Default::default() + } + } + + config } fn load_queries(name: &str) -> LanguageQueries { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index bef57bafb48cf9..a4a13b296ed5c1 100644 --- 
a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -36,10 +36,10 @@ use language::{ markdown, point_to_lsp, prepare_completion_documentation, proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, - DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageConfig, - LanguageMatcher, LanguageName, LanguageRegistry, LanguageServerBinaryStatus, - LanguageServerName, LocalFile, LspAdapter, LspAdapterDelegate, Patch, PointUtf16, - TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, + DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageName, + LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LocalFile, LspAdapter, + LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, + Unclipped, }; use lsp::{ CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, @@ -53,7 +53,7 @@ use parking_lot::{Mutex, RwLock}; use postage::watch; use rand::prelude::*; -use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient}; +use rpc::AnyProtoClient; use serde::Serialize; use settings::{Settings, SettingsLocation, SettingsStore}; use sha2::{Digest, Sha256}; @@ -644,16 +644,15 @@ pub struct RemoteLspStore { impl RemoteLspStore {} -pub struct SshLspStore { - upstream_client: AnyProtoClient, - current_lsp_settings: HashMap, -} +// pub struct SshLspStore { +// upstream_client: AnyProtoClient, +// current_lsp_settings: HashMap, +// } #[allow(clippy::large_enum_variant)] pub enum LspStoreMode { Local(LocalLspStore), // ssh host and collab host Remote(RemoteLspStore), // collab guest - Ssh(SshLspStore), // ssh client } impl LspStoreMode { @@ -661,10 +660,6 @@ impl LspStoreMode { matches!(self, LspStoreMode::Local(_)) } - fn is_ssh(&self) -> bool { - matches!(self, LspStoreMode::Ssh(_)) - } - fn is_remote(&self) -> bool { 
matches!(self, LspStoreMode::Remote(_)) } @@ -787,13 +782,6 @@ impl LspStore { } } - pub fn as_ssh(&self) -> Option<&SshLspStore> { - match &self.mode { - LspStoreMode::Ssh(ssh_lsp_store) => Some(ssh_lsp_store), - _ => None, - } - } - pub fn as_local(&self) -> Option<&LocalLspStore> { match &self.mode { LspStoreMode::Local(local_lsp_store) => Some(local_lsp_store), @@ -810,9 +798,6 @@ impl LspStore { pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { match &self.mode { - LspStoreMode::Ssh(SshLspStore { - upstream_client, .. - }) => Some((upstream_client.clone(), SSH_PROJECT_ID)), LspStoreMode::Remote(RemoteLspStore { upstream_client, upstream_project_id, @@ -827,11 +812,7 @@ impl LspStore { new_settings: HashMap, ) -> Option> { match &mut self.mode { - LspStoreMode::Ssh(SshLspStore { - current_lsp_settings, - .. - }) - | LspStoreMode::Local(LocalLspStore { + LspStoreMode::Local(LocalLspStore { current_lsp_settings, .. }) => { @@ -919,43 +900,6 @@ impl LspStore { }) } - pub fn new_ssh( - buffer_store: Model, - worktree_store: Model, - languages: Arc, - upstream_client: AnyProtoClient, - cx: &mut ModelContext, - ) -> Self { - cx.subscribe(&buffer_store, Self::on_buffer_store_event) - .detach(); - cx.subscribe(&worktree_store, Self::on_worktree_store_event) - .detach(); - cx.observe_global::(Self::on_settings_changed) - .detach(); - - Self { - mode: LspStoreMode::Ssh(SshLspStore { - upstream_client, - current_lsp_settings: Default::default(), - }), - downstream_client: None, - buffer_store, - worktree_store, - languages: languages.clone(), - language_server_ids: Default::default(), - language_server_statuses: Default::default(), - nonce: StdRng::from_entropy().gen(), - buffer_snapshots: Default::default(), - next_diagnostic_group_id: Default::default(), - diagnostic_summaries: Default::default(), - - diagnostics: Default::default(), - active_entry: None, - _maintain_workspace_config: Self::maintain_workspace_config(cx), - _maintain_buffer_languages: 
Self::maintain_buffer_languages(languages.clone(), cx), - } - } - pub fn new_remote( buffer_store: Model, worktree_store: Model, @@ -3697,11 +3641,11 @@ impl LspStore { mut cx: AsyncAppContext, ) -> Result { let response_from_ssh = this.update(&mut cx, |this, _| { - let ssh = this.as_ssh()?; + let (upstream_client, project_id) = this.upstream_client()?; let mut payload = envelope.payload.clone(); - payload.project_id = SSH_PROJECT_ID; + payload.project_id = project_id; - Some(ssh.upstream_client.request(payload)) + Some(upstream_client.request(payload)) })?; if let Some(response_from_ssh) = response_from_ssh { return response_from_ssh.await; @@ -5009,165 +4953,6 @@ impl LspStore { Ok(proto::Ack {}) } - pub async fn handle_create_language_server( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let server_name = LanguageServerName::from_proto(envelope.payload.name); - - let binary = envelope - .payload - .binary - .ok_or_else(|| anyhow!("missing binary"))?; - let binary = LanguageServerBinary { - path: PathBuf::from(binary.path), - env: None, - arguments: binary.arguments.into_iter().map(Into::into).collect(), - }; - let language = envelope - .payload - .language - .ok_or_else(|| anyhow!("missing language"))?; - let language_name = LanguageName::from_proto(language.name); - let matcher: LanguageMatcher = serde_json::from_str(&language.matcher)?; - - this.update(&mut cx, |this, cx| { - let Some(worktree) = this - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - else { - return Err(anyhow!("worktree not found")); - }; - - this.languages - .register_language(language_name.clone(), None, matcher.clone(), { - let language_name = language_name.clone(); - move || { - Ok(( - LanguageConfig { - name: language_name.clone(), - matcher: matcher.clone(), - ..Default::default() - }, - Default::default(), - Default::default(), - )) - } - }); - 
cx.background_executor() - .spawn(this.languages.language_for_name(language_name.0.as_ref())) - .detach(); - - // host - let adapter = this.languages.get_or_register_lsp_adapter( - language_name.clone(), - server_name.clone(), - || { - Arc::new(SshLspAdapter::new( - server_name, - binary, - envelope.payload.initialization_options, - envelope.payload.code_action_kinds, - )) - }, - ); - - this.start_language_server(&worktree, adapter, language_name, cx); - Ok(()) - })??; - Ok(proto::Ack {}) - } - - pub async fn handle_which_command( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let command = PathBuf::from(envelope.payload.command); - let response = this - .update(&mut cx, |this, cx| { - let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); - anyhow::Ok( - cx.spawn(|_, _| async move { delegate.which(command.as_os_str()).await }), - ) - })?? - .await; - - Ok(proto::WhichCommandResponse { - path: response.map(|path| path.to_string_lossy().to_string()), - }) - } - - pub async fn handle_shell_env( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let response = this - .update(&mut cx, |this, cx| { - let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); - anyhow::Ok(cx.spawn(|_, _| async move { delegate.shell_env().await })) - })?? 
- .await; - - Ok(proto::ShellEnvResponse { - env: response.into_iter().collect(), - }) - } - pub async fn handle_try_exec( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let binary = envelope - .payload - .binary - .ok_or_else(|| anyhow!("missing binary"))?; - let binary = LanguageServerBinary { - path: PathBuf::from(binary.path), - env: None, - arguments: binary.arguments.into_iter().map(Into::into).collect(), - }; - this.update(&mut cx, |this, cx| { - let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); - anyhow::Ok(cx.spawn(|_, _| async move { delegate.try_exec(binary).await })) - })?? - .await?; - - Ok(proto::Ack {}) - } - - pub async fn handle_read_text_file( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let path = envelope - .payload - .path - .ok_or_else(|| anyhow!("missing path"))?; - let worktree_id = WorktreeId::from_proto(path.worktree_id); - let path = PathBuf::from(path.path); - let response = this - .update(&mut cx, |this, cx| { - let worktree = this.worktree_for_id(worktree_id, cx)?; - let delegate = LocalLspAdapterDelegate::for_local(this, &worktree, cx); - anyhow::Ok(cx.spawn(|_, _| async move { delegate.read_text_file(path).await })) - })?? 
- .await?; - - Ok(proto::ReadTextFileResponse { text: response }) - } - async fn handle_apply_additional_edits_for_completion( this: Model, envelope: TypedEnvelope, @@ -5388,89 +5173,6 @@ impl LspStore { .reorder_language_servers(&language, enabled_lsp_adapters); } - fn start_language_server_on_ssh_host( - &mut self, - worktree: &Model, - adapter: Arc, - language: LanguageName, - cx: &mut ModelContext, - ) { - let ssh = self.as_ssh().unwrap(); - - let delegate = Arc::new(SshLspAdapterDelegate { - lsp_store: cx.handle().downgrade(), - worktree: worktree.read(cx).snapshot(), - upstream_client: ssh.upstream_client.clone(), - language_registry: self.languages.clone(), - }) as Arc; - - let Some((upstream_client, project_id)) = self.upstream_client() else { - return; - }; - let worktree_id = worktree.read(cx).id().to_proto(); - let name = adapter.name().to_string(); - - let Some(available_language) = self.languages.available_language_for_name(&language) else { - log::error!("failed to find available language {language}"); - return; - }; - - let user_binary_task = - self.get_language_server_binary(adapter.clone(), delegate.clone(), false, cx); - - let task = cx.spawn(|_, _| async move { - let binary = user_binary_task.await?; - let name = adapter.name(); - let code_action_kinds = adapter - .adapter - .code_action_kinds() - .map(|kinds| serde_json::to_string(&kinds)) - .transpose()?; - let get_options = adapter.adapter.clone().initialization_options(&delegate); - let initialization_options = get_options - .await? 
- .map(|options| serde_json::to_string(&options)) - .transpose()?; - - let language_server_command = proto::LanguageServerCommand { - path: binary.path.to_string_lossy().to_string(), - arguments: binary - .arguments - .iter() - .map(|args| args.to_string_lossy().to_string()) - .collect(), - env: binary.env.unwrap_or_default().into_iter().collect(), - }; - - upstream_client - .request(proto::CreateLanguageServer { - project_id, - worktree_id, - name: name.0.to_string(), - binary: Some(language_server_command), - initialization_options, - code_action_kinds, - language: Some(proto::AvailableLanguage { - name: language.to_proto(), - matcher: serde_json::to_string(&available_language.matcher())?, - }), - }) - .await - }); - cx.spawn(|this, mut cx| async move { - if let Err(e) = task.await { - this.update(&mut cx, |_this, cx| { - cx.emit(LspStoreEvent::Notification(format!( - "failed to start {}: {}", - name, e - ))) - }) - .ok(); - } - }) - .detach(); - } - fn get_language_server_binary( &self, adapter: Arc, @@ -5558,11 +5260,6 @@ impl LspStore { return; } - if self.mode.is_ssh() { - self.start_language_server_on_ssh_host(worktree_handle, adapter, language, cx); - return; - } - let project_settings = ProjectSettings::get( Some(SettingsLocation { worktree_id, @@ -5852,9 +5549,6 @@ impl LspStore { } else { Task::ready(Vec::new()) } - } else if self.mode.is_ssh() { - // TODO ssh - Task::ready(Vec::new()) } else { Task::ready(Vec::new()) } @@ -7905,116 +7599,6 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { } } -struct SshLspAdapterDelegate { - lsp_store: WeakModel, - worktree: worktree::Snapshot, - upstream_client: AnyProtoClient, - language_registry: Arc, -} - -#[async_trait] -impl LspAdapterDelegate for SshLspAdapterDelegate { - fn show_notification(&self, message: &str, cx: &mut AppContext) { - self.lsp_store - .update(cx, |_, cx| { - cx.emit(LspStoreEvent::Notification(message.to_owned())) - }) - .ok(); - } - - async fn npm_package_installed_version( - &self, 
- _package_name: &str, - ) -> Result> { - Ok(None) - } - - fn http_client(&self) -> Arc { - Arc::new(BlockedHttpClient) - } - - fn worktree_id(&self) -> WorktreeId { - self.worktree.id() - } - - fn worktree_root_path(&self) -> &Path { - self.worktree.abs_path().as_ref() - } - - async fn shell_env(&self) -> HashMap { - use rpc::proto::SSH_PROJECT_ID; - - self.upstream_client - .request(proto::ShellEnv { - project_id: SSH_PROJECT_ID, - worktree_id: self.worktree_id().to_proto(), - }) - .await - .map(|response| response.env.into_iter().collect()) - .unwrap_or_default() - } - - async fn which(&self, command: &OsStr) -> Option { - use rpc::proto::SSH_PROJECT_ID; - - self.upstream_client - .request(proto::WhichCommand { - project_id: SSH_PROJECT_ID, - worktree_id: self.worktree_id().to_proto(), - command: command.to_string_lossy().to_string(), - }) - .await - .log_err() - .and_then(|response| response.path) - .map(PathBuf::from) - } - - async fn try_exec(&self, command: LanguageServerBinary) -> Result<()> { - self.upstream_client - .request(proto::TryExec { - project_id: rpc::proto::SSH_PROJECT_ID, - worktree_id: self.worktree.id().to_proto(), - binary: Some(proto::LanguageServerCommand { - path: command.path.to_string_lossy().to_string(), - arguments: command - .arguments - .into_iter() - .map(|s| s.to_string_lossy().to_string()) - .collect(), - env: command.env.unwrap_or_default().into_iter().collect(), - }), - }) - .await?; - Ok(()) - } - - async fn language_server_download_dir(&self, _: &LanguageServerName) -> Option> { - None - } - - fn update_status( - &self, - server_name: LanguageServerName, - status: language::LanguageServerBinaryStatus, - ) { - self.language_registry - .update_lsp_status(server_name, status); - } - - async fn read_text_file(&self, path: PathBuf) -> Result { - self.upstream_client - .request(proto::ReadTextFile { - project_id: rpc::proto::SSH_PROJECT_ID, - path: Some(proto::ProjectPath { - worktree_id: self.worktree.id().to_proto(), - path: 
path.to_string_lossy().to_string(), - }), - }) - .await - .map(|r| r.text) - } -} - async fn populate_labels_for_symbols( symbols: Vec, language_registry: &Arc, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 10fd88f286d994..c3b3c383c11ffd 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -706,11 +706,12 @@ impl Project { let environment = ProjectEnvironment::new(&worktree_store, None, cx); let lsp_store = cx.new_model(|cx| { - LspStore::new_ssh( + LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), languages.clone(), ssh.clone().into(), + SSH_PROJECT_ID, cx, ) }); diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 475ed139edfb87..d81ef35f6bffbd 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -283,18 +283,6 @@ message Envelope { CloseBuffer close_buffer = 245; UpdateUserSettings update_user_settings = 246; - CreateLanguageServer create_language_server = 247; - - WhichCommand which_command = 248; - WhichCommandResponse which_command_response = 249; - - ShellEnv shell_env = 250; - ShellEnvResponse shell_env_response = 251; - - TryExec try_exec = 252; - ReadTextFile read_text_file = 253; - ReadTextFileResponse read_text_file_response = 254; - CheckFileExists check_file_exists = 255; CheckFileExistsResponse check_file_exists_response = 256; // current max } @@ -302,6 +290,7 @@ message Envelope { reserved 158 to 161; reserved 166 to 169; reserved 224 to 229; + reserved 247 to 254; } // Messages @@ -2517,67 +2506,6 @@ message UpdateUserSettings { string content = 2; } -message LanguageServerCommand { - string path = 1; - repeated string arguments = 2; - map env = 3; -} - -message AvailableLanguage { - string name = 7; - string matcher = 8; -} - -message CreateLanguageServer { - uint64 project_id = 1; - uint64 worktree_id = 2; - string name = 3; - - LanguageServerCommand binary = 4; - optional string initialization_options = 5; - 
optional string code_action_kinds = 6; - - AvailableLanguage language = 7; -} - -message WhichCommand { - uint64 project_id = 1; - uint64 worktree_id = 2; - string command = 3; -} - -message WhichCommandResponse { - optional string path = 1; -} - -message ShellEnv { - uint64 project_id = 1; - uint64 worktree_id = 2; -} - -message ShellEnvResponse { - map env = 1; -} - -message ReadTextFile { - uint64 project_id = 1; - ProjectPath path = 2; -} - -message ReadTextFileResponse { - string text = 1; -} - -message TryExec { - uint64 project_id = 1; - uint64 worktree_id = 2; - LanguageServerCommand binary = 3; -} - -message TryExecResponse { - string text = 1; -} - message CheckFileExists { uint64 project_id = 1; string path = 2; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 4146a47409ad71..799d51defec718 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -365,14 +365,6 @@ messages!( (FindSearchCandidatesResponse, Background), (CloseBuffer, Foreground), (UpdateUserSettings, Foreground), - (CreateLanguageServer, Foreground), - (WhichCommand, Foreground), - (WhichCommandResponse, Foreground), - (ShellEnv, Foreground), - (ShellEnvResponse, Foreground), - (TryExec, Foreground), - (ReadTextFile, Foreground), - (ReadTextFileResponse, Foreground), (CheckFileExists, Background), (CheckFileExistsResponse, Background) ); @@ -498,11 +490,6 @@ request_messages!( (SynchronizeContexts, SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (AddWorktree, AddWorktreeResponse), - (CreateLanguageServer, Ack), - (WhichCommand, WhichCommandResponse), - (ShellEnv, ShellEnvResponse), - (ReadTextFile, ReadTextFileResponse), - (TryExec, Ack), (CheckFileExists, CheckFileExistsResponse) ); @@ -577,11 +564,6 @@ entity_messages!( SynchronizeContexts, LspExtSwitchSourceHeader, UpdateUserSettings, - CreateLanguageServer, - WhichCommand, - ShellEnv, - TryExec, - ReadTextFile, CheckFileExists, ); diff --git 
a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index 64db2616e9b2f8..b15970042d0f7f 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -39,6 +39,7 @@ shellexpand.workspace = true smol.workspace = true worktree.workspace = true language.workspace = true +languages.workspace = true util.workspace = true [dev-dependencies] diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 84fb22b282d37d..4b13938d8ca2f5 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -44,6 +44,10 @@ impl HeadlessProject { pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); + let node_runtime = NodeRuntime::unavailable(); + + languages::init(languages.clone(), node_runtime.clone(), cx); + let worktree_store = cx.new_model(|cx| { let mut store = WorktreeStore::local(true, fs.clone()); store.shared(SSH_PROJECT_ID, session.clone().into(), cx); @@ -56,7 +60,7 @@ impl HeadlessProject { }); let prettier_store = cx.new_model(|cx| { PrettierStore::new( - NodeRuntime::unavailable(), + node_runtime, fs.clone(), languages.clone(), worktree_store.clone(), @@ -116,12 +120,6 @@ impl HeadlessProject { client.add_model_request_handler(BufferStore::handle_update_buffer); client.add_model_message_handler(BufferStore::handle_close_buffer); - client.add_model_request_handler(LspStore::handle_create_language_server); - client.add_model_request_handler(LspStore::handle_which_command); - client.add_model_request_handler(LspStore::handle_shell_env); - client.add_model_request_handler(LspStore::handle_try_exec); - client.add_model_request_handler(LspStore::handle_read_text_file); - BufferStore::init(&client); WorktreeStore::init(&client); SettingsObserver::init(&client); diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 
eb8f45d92e476d..897e0e9a28bca8 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -64,7 +64,7 @@ language.workspace = true language_model.workspace = true language_selector.workspace = true language_tools.workspace = true -languages.workspace = true +languages = {workspace = true, features = ["load-grammars"] } libc.workspace = true log.workspace = true markdown_preview.workspace = true From 3161aedcb02e5e18bc802ffa38504909490938c6 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 25 Sep 2024 16:03:08 -0700 Subject: [PATCH 339/762] Fix broken collaboration UI from #18308 (#18372) Fixes a bug introduced by #18308, that caused the call controls to render incorrectly. Release Notes: - N/A --- crates/title_bar/src/collab.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index e9f89643d5729f..2f534589050358 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -284,14 +284,14 @@ impl TitleBar { let room = room.read(cx); let project = self.project.read(cx); + let is_local = project.is_local() || project.is_via_ssh(); let is_dev_server_project = project.dev_server_project_id().is_some(); - let is_shared = project.is_shared(); + let is_shared = (is_local || is_dev_server_project) && project.is_shared(); let is_muted = room.is_muted(); let is_deafened = room.is_deafened().unwrap_or(false); let is_screen_sharing = room.is_screen_sharing(); let can_use_microphone = room.can_use_microphone(); - let can_share_projects = room.can_share_projects() - && (is_dev_server_project || project.is_local() || project.is_via_ssh()); + let can_share_projects = room.can_share_projects(); let platform_supported = match self.platform_style { PlatformStyle::Mac => true, PlatformStyle::Linux | PlatformStyle::Windows => false, @@ -299,7 +299,7 @@ impl TitleBar { let mut children = Vec::new(); - if can_share_projects { + if (is_local || is_dev_server_project) 
&& can_share_projects { children.push( Button::new( "toggle_sharing", From 6167688a63eed63791814e39b3b8fd1a10da0e9b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 25 Sep 2024 16:33:00 -0700 Subject: [PATCH 340/762] Proposed changes editor features (#18373) This PR adds some more functionality to the Proposed Changes Editor view, which we'll be using in https://github.com/zed-industries/zed/pull/18240 for allowing the assistant to propose changes to a set of buffers. * Add an `Apply All` button, and fully implement applying of changes to the base buffer * Make the proposed changes editor searchable * Fix a bug in branch buffers' diff state management Release Notes: - N/A --- crates/editor/src/editor.rs | 4 +- crates/editor/src/proposed_changes_editor.rs | 84 +++++++++++- crates/language/src/buffer.rs | 130 ++++++++++++++----- crates/language/src/buffer_tests.rs | 96 ++++++++------ crates/zed/src/zed.rs | 3 + 5 files changed, 241 insertions(+), 76 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 78c8ba6920337c..23448b43a7cc72 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -98,7 +98,9 @@ use language::{ }; use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange}; use linked_editing_ranges::refresh_linked_ranges; -use proposed_changes_editor::{ProposedChangesBuffer, ProposedChangesEditor}; +pub use proposed_changes_editor::{ + ProposedChangesBuffer, ProposedChangesEditor, ProposedChangesEditorToolbar, +}; use similar::{ChangeTag, TextDiff}; use task::{ResolvedTask, TaskTemplate, TaskVariables}; diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 3979e558a42364..ec0c05d88382c0 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -6,10 +6,13 @@ use language::{Buffer, BufferEvent, Capability}; use multi_buffer::{ExcerptRange, MultiBuffer}; use 
project::Project; use smol::stream::StreamExt; -use std::{ops::Range, time::Duration}; +use std::{any::TypeId, ops::Range, time::Duration}; use text::ToOffset; use ui::prelude::*; -use workspace::Item; +use workspace::{ + searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation, + ToolbarItemView, +}; pub struct ProposedChangesEditor { editor: View, @@ -23,6 +26,10 @@ pub struct ProposedChangesBuffer { pub ranges: Vec>, } +pub struct ProposedChangesEditorToolbar { + current_editor: Option>, +} + impl ProposedChangesEditor { pub fn new( buffers: Vec>, @@ -96,6 +103,17 @@ impl ProposedChangesEditor { self.recalculate_diffs_tx.unbounded_send(buffer).ok(); } } + + fn apply_all_changes(&self, cx: &mut ViewContext) { + let buffers = self.editor.read(cx).buffer.read(cx).all_buffers(); + for branch_buffer in buffers { + if let Some(base_buffer) = branch_buffer.read(cx).diff_base_buffer() { + base_buffer.update(cx, |base_buffer, cx| { + base_buffer.merge(&branch_buffer, None, cx) + }); + } + } + } } impl Render for ProposedChangesEditor { @@ -122,4 +140,66 @@ impl Item for ProposedChangesEditor { fn tab_content_text(&self, _cx: &WindowContext) -> Option { Some("Proposed changes".into()) } + + fn as_searchable(&self, _: &View) -> Option> { + Some(Box::new(self.editor.clone())) + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a View, + _: &'a AppContext, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.to_any()) + } else if type_id == TypeId::of::() { + Some(self.editor.to_any()) + } else { + None + } + } +} + +impl ProposedChangesEditorToolbar { + pub fn new() -> Self { + Self { + current_editor: None, + } + } + + fn get_toolbar_item_location(&self) -> ToolbarItemLocation { + if self.current_editor.is_some() { + ToolbarItemLocation::PrimaryRight + } else { + ToolbarItemLocation::Hidden + } + } +} + +impl Render for ProposedChangesEditorToolbar { + fn render(&mut self, _cx: &mut ViewContext) 
-> impl IntoElement { + let editor = self.current_editor.clone(); + Button::new("apply-changes", "Apply All").on_click(move |_, cx| { + if let Some(editor) = &editor { + editor.update(cx, |editor, cx| { + editor.apply_all_changes(cx); + }); + } + }) + } +} + +impl EventEmitter for ProposedChangesEditorToolbar {} + +impl ToolbarItemView for ProposedChangesEditorToolbar { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn workspace::ItemHandle>, + _cx: &mut ViewContext, + ) -> workspace::ToolbarItemLocation { + self.current_editor = + active_pane_item.and_then(|item| item.downcast::()); + self.get_toolbar_item_location() + } } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 5735ee961651ab..7abc9b8dba146a 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -87,7 +87,11 @@ pub type BufferRow = u32; #[derive(Clone)] enum BufferDiffBase { Git(Rope), - PastBufferVersion(Model, BufferSnapshot), + PastBufferVersion { + buffer: Model, + rope: Rope, + operations_to_ignore: Vec, + }, } /// An in-memory representation of a source code file, including its text, @@ -795,19 +799,15 @@ impl Buffer { let this = cx.handle(); cx.new_model(|cx| { let mut branch = Self { - diff_base: Some(BufferDiffBase::PastBufferVersion( - this.clone(), - self.snapshot(), - )), + diff_base: Some(BufferDiffBase::PastBufferVersion { + buffer: this.clone(), + rope: self.as_rope().clone(), + operations_to_ignore: Vec::new(), + }), language: self.language.clone(), has_conflict: self.has_conflict, has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()), - _subscriptions: vec![cx.subscribe(&this, |branch: &mut Self, _, event, cx| { - if let BufferEvent::Operation { operation, .. 
} = event { - branch.apply_ops([operation.clone()], cx); - branch.diff_base_version += 1; - } - })], + _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)], ..Self::build( self.text.branch(), None, @@ -823,18 +823,74 @@ impl Buffer { }) } - pub fn merge(&mut self, branch: &Model, cx: &mut ModelContext) { - let branch = branch.read(cx); - let edits = branch - .edits_since::(&self.version) - .map(|edit| { - ( - edit.old, - branch.text_for_range(edit.new).collect::(), + /// Applies all of the changes in `branch` buffer that intersect the given `range` + /// to this buffer. + pub fn merge( + &mut self, + branch: &Model, + range: Option>, + cx: &mut ModelContext, + ) { + let edits = branch.read_with(cx, |branch, _| { + branch + .edits_since_in_range::( + &self.version, + range.unwrap_or(Anchor::MIN..Anchor::MAX), ) - }) - .collect::>(); - self.edit(edits, None, cx); + .map(|edit| { + ( + edit.old, + branch.text_for_range(edit.new).collect::(), + ) + }) + .collect::>() + }); + let operation = self.edit(edits, None, cx); + + // Prevent this operation from being reapplied to the branch. + branch.update(cx, |branch, cx| { + if let Some(BufferDiffBase::PastBufferVersion { + operations_to_ignore, + .. + }) = &mut branch.diff_base + { + operations_to_ignore.extend(operation); + } + cx.emit(BufferEvent::Edited) + }); + } + + fn on_base_buffer_event( + &mut self, + _: Model, + event: &BufferEvent, + cx: &mut ModelContext, + ) { + if let BufferEvent::Operation { operation, .. } = event { + if let Some(BufferDiffBase::PastBufferVersion { + operations_to_ignore, + .. 
+ }) = &mut self.diff_base + { + let mut is_ignored = false; + if let Operation::Buffer(text::Operation::Edit(buffer_operation)) = &operation { + operations_to_ignore.retain(|operation_to_ignore| { + match buffer_operation.timestamp.cmp(&operation_to_ignore) { + Ordering::Less => true, + Ordering::Equal => { + is_ignored = true; + false + } + Ordering::Greater => false, + } + }); + } + if !is_ignored { + self.apply_ops([operation.clone()], cx); + self.diff_base_version += 1; + } + } + } } #[cfg(test)] @@ -1017,9 +1073,8 @@ impl Buffer { /// Returns the current diff base, see [Buffer::set_diff_base]. pub fn diff_base(&self) -> Option<&Rope> { match self.diff_base.as_ref()? { - BufferDiffBase::Git(rope) => Some(rope), - BufferDiffBase::PastBufferVersion(_, buffer_snapshot) => { - Some(buffer_snapshot.as_rope()) + BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => { + Some(rope) } } } @@ -1050,29 +1105,36 @@ impl Buffer { self.diff_base_version } + pub fn diff_base_buffer(&self) -> Option> { + match self.diff_base.as_ref()? { + BufferDiffBase::Git(_) => None, + BufferDiffBase::PastBufferVersion { buffer, .. } => Some(buffer.clone()), + } + } + /// Recomputes the diff. pub fn recalculate_diff(&mut self, cx: &mut ModelContext) -> Option> { - let diff_base_rope = match self.diff_base.as_mut()? { + let diff_base_rope = match self.diff_base.as_ref()? { BufferDiffBase::Git(rope) => rope.clone(), - BufferDiffBase::PastBufferVersion(base_buffer, base_buffer_snapshot) => { - let new_base_snapshot = base_buffer.read(cx).snapshot(); - *base_buffer_snapshot = new_base_snapshot; - base_buffer_snapshot.as_rope().clone() - } + BufferDiffBase::PastBufferVersion { buffer, .. 
} => buffer.read(cx).as_rope().clone(), }; - let snapshot = self.snapshot(); + let snapshot = self.snapshot(); let mut diff = self.git_diff.clone(); let diff = cx.background_executor().spawn(async move { diff.update(&diff_base_rope, &snapshot).await; - diff + (diff, diff_base_rope) }); Some(cx.spawn(|this, mut cx| async move { - let buffer_diff = diff.await; + let (buffer_diff, diff_base_rope) = diff.await; this.update(&mut cx, |this, cx| { this.git_diff = buffer_diff; this.non_text_state_update_count += 1; + if let Some(BufferDiffBase::PastBufferVersion { rope, .. }) = &mut this.diff_base { + *rope = diff_base_rope; + cx.emit(BufferEvent::DiffBaseChanged); + } cx.emit(BufferEvent::DiffUpdated); }) .ok(); diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 1335a94dd0313f..49cc31067b93ae 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -2413,80 +2413,98 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { }); // Edits to the branch are not applied to the base. - branch_buffer.update(cx, |buffer, cx| { - buffer.edit( - [(Point::new(1, 0)..Point::new(1, 0), "ONE_POINT_FIVE\n")], + branch_buffer.update(cx, |branch_buffer, cx| { + branch_buffer.edit( + [ + (Point::new(1, 0)..Point::new(1, 0), "1.5\n"), + (Point::new(2, 0)..Point::new(2, 5), "THREE"), + ], None, cx, ) }); branch_buffer.read_with(cx, |branch_buffer, cx| { assert_eq!(base_buffer.read(cx).text(), "one\ntwo\nthree\n"); - assert_eq!(branch_buffer.text(), "one\nONE_POINT_FIVE\ntwo\nthree\n"); + assert_eq!(branch_buffer.text(), "one\n1.5\ntwo\nTHREE\n"); }); + // The branch buffer maintains a diff with respect to its base buffer. + start_recalculating_diff(&branch_buffer, cx); + cx.run_until_parked(); + assert_diff_hunks( + &branch_buffer, + cx, + &[(1..2, "", "1.5\n"), (3..4, "three\n", "THREE\n")], + ); + // Edits to the base are applied to the branch. 
base_buffer.update(cx, |buffer, cx| { buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "ZERO\n")], None, cx) }); branch_buffer.read_with(cx, |branch_buffer, cx| { assert_eq!(base_buffer.read(cx).text(), "ZERO\none\ntwo\nthree\n"); - assert_eq!( - branch_buffer.text(), - "ZERO\none\nONE_POINT_FIVE\ntwo\nthree\n" - ); + assert_eq!(branch_buffer.text(), "ZERO\none\n1.5\ntwo\nTHREE\n"); }); - assert_diff_hunks(&branch_buffer, cx, &[(2..3, "", "ONE_POINT_FIVE\n")]); + // Until the git diff recalculation is complete, the git diff references + // the previous content of the base buffer, so that it stays in sync. + start_recalculating_diff(&branch_buffer, cx); + assert_diff_hunks( + &branch_buffer, + cx, + &[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")], + ); + cx.run_until_parked(); + assert_diff_hunks( + &branch_buffer, + cx, + &[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")], + ); // Edits to any replica of the base are applied to the branch. base_buffer_replica.update(cx, |buffer, cx| { - buffer.edit( - [(Point::new(2, 0)..Point::new(2, 0), "TWO_POINT_FIVE\n")], - None, - cx, - ) + buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "2.5\n")], None, cx) }); branch_buffer.read_with(cx, |branch_buffer, cx| { - assert_eq!( - base_buffer.read(cx).text(), - "ZERO\none\ntwo\nTWO_POINT_FIVE\nthree\n" - ); - assert_eq!( - branch_buffer.text(), - "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" - ); + assert_eq!(base_buffer.read(cx).text(), "ZERO\none\ntwo\n2.5\nthree\n"); + assert_eq!(branch_buffer.text(), "ZERO\none\n1.5\ntwo\n2.5\nTHREE\n"); }); // Merging the branch applies all of its changes to the base. 
base_buffer.update(cx, |base_buffer, cx| { - base_buffer.merge(&branch_buffer, cx); + base_buffer.merge(&branch_buffer, None, cx); + }); + + branch_buffer.update(cx, |branch_buffer, cx| { assert_eq!( - base_buffer.text(), - "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" + base_buffer.read(cx).text(), + "ZERO\none\n1.5\ntwo\n2.5\nTHREE\n" ); + assert_eq!(branch_buffer.text(), "ZERO\none\n1.5\ntwo\n2.5\nTHREE\n"); }); } +fn start_recalculating_diff(buffer: &Model, cx: &mut TestAppContext) { + buffer + .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) + .detach(); +} + +#[track_caller] fn assert_diff_hunks( buffer: &Model, cx: &mut TestAppContext, expected_hunks: &[(Range, &str, &str)], ) { - buffer - .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) - .detach(); - cx.executor().run_until_parked(); - - buffer.read_with(cx, |buffer, _| { - let snapshot = buffer.snapshot(); - assert_hunks( - snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX), - &snapshot, - &buffer.diff_base().unwrap().to_string(), - expected_hunks, - ); - }); + let (snapshot, diff_base) = buffer.read_with(cx, |buffer, _| { + (buffer.snapshot(), buffer.diff_base().unwrap().to_string()) + }); + assert_hunks( + snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX), + &snapshot, + &diff_base, + expected_hunks, + ); } #[gpui::test(iterations = 100)] diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index c631c01f99a1a6..4dc378a755bdae 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -14,6 +14,7 @@ use breadcrumbs::Breadcrumbs; use client::ZED_URL_SCHEME; use collections::VecDeque; use command_palette_hooks::CommandPaletteFilter; +use editor::ProposedChangesEditorToolbar; use editor::{scroll::Autoscroll, Editor, MultiBuffer}; use feature_flags::FeatureFlagAppExt; use gpui::{ @@ -582,6 +583,8 @@ fn initialize_pane(workspace: &mut Workspace, pane: &View, cx: &mut ViewCo let buffer_search_bar = 
cx.new_view(search::BufferSearchBar::new); toolbar.add_item(buffer_search_bar.clone(), cx); + let proposed_change_bar = cx.new_view(|_| ProposedChangesEditorToolbar::new()); + toolbar.add_item(proposed_change_bar, cx); let quick_action_bar = cx.new_view(|cx| QuickActionBar::new(buffer_search_bar, workspace, cx)); toolbar.add_item(quick_action_bar, cx); From b701eab44f0728d90d2d65b3bba20263e16897ad Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 26 Sep 2024 01:31:17 -0600 Subject: [PATCH 341/762] Avoid unwrap in file finder (#18374) Release Notes: - Fixed a (rare) panic in file finder --------- Co-authored-by: Kirill Bulatov --- crates/file_finder/src/file_finder.rs | 28 +++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 1a65bd352d61d7..f63c499ee84c72 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -394,7 +394,7 @@ fn matching_history_items<'a>( .chars(), ), }; - candidates_paths.insert(Arc::clone(&found_path.project.path), found_path); + candidates_paths.insert(&found_path.project, found_path); Some((found_path.project.worktree_id, candidate)) }) .fold( @@ -419,17 +419,21 @@ fn matching_history_items<'a>( max_results, ) .into_iter() - .map(|path_match| { - let (_, found_path) = candidates_paths - .remove_entry(&path_match.path) - .expect("candidate info not found"); - ( - Arc::clone(&path_match.path), - Match::History { - path: found_path.clone(), - panel_match: Some(ProjectPanelOrdMatch(path_match)), - }, - ) + .filter_map(|path_match| { + candidates_paths + .remove_entry(&ProjectPath { + worktree_id: WorktreeId::from_usize(path_match.worktree_id), + path: Arc::clone(&path_match.path), + }) + .map(|(_, found_path)| { + ( + Arc::clone(&path_match.path), + Match::History { + path: found_path.clone(), + panel_match: Some(ProjectPanelOrdMatch(path_match)), + }, + ) + }) }), ); } From 
2d2e20f9d426709f8cebfc7321866880834db4a3 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 26 Sep 2024 11:07:07 +0200 Subject: [PATCH 342/762] editor: Fix cursor shape not restoring when setting removed (#18379) Closes #18119 Release Notes: - Fixed the cursor shape in the editor not changing back to default when `{"cursor_shape": "..."}` setting is removed. (Does not apply to Vim mode.) --- crates/editor/src/editor.rs | 18 +++++++++++++----- crates/vim/src/vim.rs | 7 +++++++ 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 23448b43a7cc72..6e5543132c8e78 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11925,12 +11925,19 @@ impl Editor { )), cx, ); - let editor_settings = EditorSettings::get_global(cx); - if let Some(cursor_shape) = editor_settings.cursor_shape { - self.cursor_shape = cursor_shape; + + let old_cursor_shape = self.cursor_shape; + + { + let editor_settings = EditorSettings::get_global(cx); + self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; + self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; + self.cursor_shape = editor_settings.cursor_shape.unwrap_or_default(); + } + + if old_cursor_shape != self.cursor_shape { + cx.emit(EditorEvent::CursorShapeChanged); } - self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin; - self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs; let project_settings = ProjectSettings::get_global(cx); self.serialize_dirty_buffers = project_settings.session.restore_unsaved_buffers; @@ -13127,6 +13134,7 @@ pub enum EditorEvent { TransactionBegun { transaction_id: clock::Lamport, }, + CursorShapeChanged, } impl EventEmitter for Editor {} diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 701972c19bb614..06116bff99de48 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -389,6 +389,7 @@ impl Vim { } 
EditorEvent::Edited { .. } => self.push_to_change_list(cx), EditorEvent::FocusedIn => self.sync_vim_settings(cx), + EditorEvent::CursorShapeChanged => self.cursor_shape_changed(cx), _ => {} } } @@ -679,6 +680,12 @@ impl Vim { }); } + fn cursor_shape_changed(&mut self, cx: &mut ViewContext) { + self.update_editor(cx, |vim, editor, cx| { + editor.set_cursor_shape(vim.cursor_shape(), cx); + }); + } + fn update_editor( &mut self, cx: &mut ViewContext, From b9b689d3221b6bcbea349b98a480d8e8f87fa802 Mon Sep 17 00:00:00 2001 From: "Hyunmin Woo (Hanul)" Date: Thu, 26 Sep 2024 19:24:29 +0900 Subject: [PATCH 343/762] Fix Typo in rust language guide (#18383) Release Notes: - N/A --- docs/src/languages/rust.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 02e90d60a403b3..330b5fa9d0151d 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -23,8 +23,8 @@ The following configuration can be used to change the inlay hint settings for `r "inlayHints": { "maxLength": null, "lifetimeElisionHints": { - "enable": "skip_trivial" - "useParameterNames": true, + "enable": "skip_trivial", + "useParameterNames": true }, "closureReturnTypeHints": { "enable": "always" From 140d70289e54328509f59d9de2fefd8e8b35bec0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 26 Sep 2024 12:26:58 +0200 Subject: [PATCH 344/762] Avoid panic by only restoring workspace if UI has launched (#18386) This should fix the `unregistered setting type workspace::workspace_settings::WorkspaceSettings` panic that came from inside `restorable_workspace_locations`. We tracked it down to a possible scenario (we can't recreate it though) in which `app.on_reopen` is called before the app has finished launching. In any case, this check makes sense, because we only want to restore a workspace in case the whole app has launched with a UI. 
Release Notes: - N/A Co-authored-by: Bennet --- crates/zed/src/main.rs | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 0f37e06f438f92..186805d12cd01d 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -425,15 +425,22 @@ fn main() { app.on_reopen(move |cx| { if let Some(app_state) = AppState::try_global(cx).and_then(|app_state| app_state.upgrade()) { - cx.spawn({ - let app_state = app_state.clone(); - |mut cx| async move { - if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await { - fail_to_open_window_async(e, &mut cx) + let ui_has_launched = cx + .try_global::() + .map(|mode| matches!(mode, AppMode::Ui)) + .unwrap_or(false); + + if ui_has_launched { + cx.spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await { + fail_to_open_window_async(e, &mut cx) + } } - } - }) - .detach(); + }) + .detach(); + } } }); From 3f415f3587b12cc4745300e4c129f252649356e2 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 26 Sep 2024 12:27:08 +0200 Subject: [PATCH 345/762] Fix `use_on_type_format` setting being unused per language (#18387) Before this change, `use_on_type_format` would only have an effect when defined on a global level in our settings. But our default.json settings would also document that it's used in language settings, i.e.: ```json { "languages": { "C": { "use_on_type_format": false }, "C++": { "use_on_type_format": false } } } ``` But this did **not** work. With the change, it now works globally and per-language. Release Notes: - Fixed `use_on_type_format` setting not working when defined inside `"languages"` in the settings. This change will now change the default behavior for C, C++, and Markdown, by turning language server's `OnTypeFormatting` completions off by default. 
Co-authored-by: Bennet --- crates/editor/src/editor.rs | 11 ++++++++++- crates/editor/src/editor_settings.rs | 6 ------ crates/language/src/language_settings.rs | 9 +++++++++ 3 files changed, 19 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 6e5543132c8e78..730482b123bf77 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3442,7 +3442,7 @@ impl Editor { s.select(new_selections) }); - if !bracket_inserted && EditorSettings::get_global(cx).use_on_type_format { + if !bracket_inserted { if let Some(on_type_format_task) = this.trigger_on_type_formatting(text.to_string(), cx) { @@ -4191,6 +4191,15 @@ impl Editor { .read(cx) .text_anchor_for_position(position, cx)?; + let settings = language_settings::language_settings( + buffer.read(cx).language_at(buffer_position).as_ref(), + buffer.read(cx).file(), + cx, + ); + if !settings.use_on_type_format { + return None; + } + // OnTypeFormatting returns a list of edits, no need to pass them between Zed instances, // hence we do LSP request & edit on host side only — add formats to host's history. let push_to_lsp_host_history = true; diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index d651e76c2c2e7d..9137629241468c 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -13,7 +13,6 @@ pub struct EditorSettings { pub show_completions_on_input: bool, pub show_completion_documentation: bool, pub completion_documentation_secondary_query_debounce: u64, - pub use_on_type_format: bool, pub toolbar: Toolbar, pub scrollbar: Scrollbar, pub gutter: Gutter, @@ -209,11 +208,6 @@ pub struct EditorSettingsContent { /// /// Default: 300 ms pub completion_documentation_secondary_query_debounce: Option, - /// Whether to use additional LSP queries to format (and amend) the code after - /// every "trigger" symbol input, defined by LSP server capabilities. 
- /// - /// Default: true - pub use_on_type_format: Option, /// Toolbar related settings pub toolbar: Option, /// Scrollbar related settings diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 735a9a60f87fa6..f830c5f25c308c 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -113,6 +113,9 @@ pub struct LanguageSettings { pub use_autoclose: bool, /// Whether to automatically surround text with brackets. pub use_auto_surround: bool, + /// Whether to use additional LSP queries to format (and amend) the code after + /// every "trigger" symbol input, defined by LSP server capabilities. + pub use_on_type_format: bool, // Controls how the editor handles the autoclosed characters. pub always_treat_brackets_as_autoclosed: bool, /// Which code actions to run on save @@ -333,6 +336,11 @@ pub struct LanguageSettingsContent { /// /// Default: false pub always_treat_brackets_as_autoclosed: Option, + /// Whether to use additional LSP queries to format (and amend) the code after + /// every "trigger" symbol input, defined by LSP server capabilities. + /// + /// Default: true + pub use_on_type_format: Option, /// Which code actions to run on save after the formatter. /// These are not run if formatting is off. 
/// @@ -1045,6 +1053,7 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent merge(&mut settings.soft_wrap, src.soft_wrap); merge(&mut settings.use_autoclose, src.use_autoclose); merge(&mut settings.use_auto_surround, src.use_auto_surround); + merge(&mut settings.use_on_type_format, src.use_on_type_format); merge( &mut settings.always_treat_brackets_as_autoclosed, src.always_treat_brackets_as_autoclosed, From 31902a1b73ce99934b8580cdacc2dd51eb87a046 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 26 Sep 2024 12:52:56 +0200 Subject: [PATCH 346/762] Remove leftover println statements (#18389) Remove some leftover println statements from #17644 Release Notes: - N/A --- crates/vim/src/normal/increment.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 6d66e380c30b80..b0501eeef7da3c 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -213,8 +213,6 @@ fn find_number( begin = Some(offset); } num.push(ch); - println!("pushing {}", ch); - println!(); } else if begin.is_some() { end = Some(offset); break; From db92a31067c8a6e6d889a63326decdd42f9de6c2 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 26 Sep 2024 13:18:50 +0200 Subject: [PATCH 347/762] lsp: Do not notify all language servers on file save (#17756) This is not an ideal solution to https://github.com/fasterthanlime/zed-diags-readme, but current status quo is not great either; we were just going through all of the language servers and notifying them, whereas we should ideally do it based on a glob. 
/cc @fasterthanlime Release Notes: - N/A --- crates/project/src/lsp_store.rs | 16 +++++++++ crates/project/src/project_tests.rs | 55 ++++++++++++++++++++++++----- 2 files changed, 63 insertions(+), 8 deletions(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index a4a13b296ed5c1..37922b7c2ee03b 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -2892,11 +2892,27 @@ impl LspStore { let file = File::from_dyn(buffer.read(cx).file())?; let worktree_id = file.worktree_id(cx); let abs_path = file.as_local()?.abs_path(cx); + let worktree_path = file.as_local()?.path(); let text_document = lsp::TextDocumentIdentifier { uri: lsp::Url::from_file_path(abs_path).log_err()?, }; + let watched_paths_for_server = &self.as_local()?.language_server_watched_paths; for server in self.language_servers_for_worktree(worktree_id) { + let should_notify = maybe!({ + Some( + watched_paths_for_server + .get(&server.server_id())? + .read(cx) + .worktree_paths + .get(&worktree_id)? + .is_match(worktree_path), + ) + }) + .unwrap_or_default(); + if !should_notify { + continue; + } if let Some(include_text) = include_text(server.as_ref()) { let text = if include_text { Some(buffer.read(cx).text()) diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 9e58caa2442439..dd14ccd60f4e98 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -386,6 +386,34 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // A server is started up, and it is notified about Rust files. 
let mut fake_rust_server = fake_rust_servers.next().await.unwrap(); + fake_rust_server + .request::(lsp::RegistrationParams { + registrations: vec![lsp::Registration { + id: Default::default(), + method: "workspace/didChangeWatchedFiles".to_string(), + register_options: serde_json::to_value( + lsp::DidChangeWatchedFilesRegistrationOptions { + watchers: vec![ + lsp::FileSystemWatcher { + glob_pattern: lsp::GlobPattern::String( + "/the-root/Cargo.toml".to_string(), + ), + kind: None, + }, + lsp::FileSystemWatcher { + glob_pattern: lsp::GlobPattern::String( + "/the-root/*.rs".to_string(), + ), + kind: None, + }, + ], + }, + ) + .ok(), + }], + }) + .await + .unwrap(); assert_eq!( fake_rust_server .receive_notification::() @@ -433,6 +461,24 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { // A json language server is started up and is only notified about the json buffer. let mut fake_json_server = fake_json_servers.next().await.unwrap(); + fake_json_server + .request::(lsp::RegistrationParams { + registrations: vec![lsp::Registration { + id: Default::default(), + method: "workspace/didChangeWatchedFiles".to_string(), + register_options: serde_json::to_value( + lsp::DidChangeWatchedFilesRegistrationOptions { + watchers: vec![lsp::FileSystemWatcher { + glob_pattern: lsp::GlobPattern::String("/the-root/*.json".to_string()), + kind: None, + }], + }, + ) + .ok(), + }], + }) + .await + .unwrap(); assert_eq!( fake_json_server .receive_notification::() @@ -483,7 +529,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { ) ); - // Save notifications are reported to all servers. + // Save notifications are reported only to servers that signed up for a given extension. 
project .update(cx, |project, cx| project.save_buffer(toml_buffer, cx)) .await @@ -495,13 +541,6 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { .text_document, lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()) ); - assert_eq!( - fake_json_server - .receive_notification::() - .await - .text_document, - lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()) - ); // Renames are reported only to servers matching the buffer's language. fs.rename( From 1deed247eb37080b6db5d84ed5054214691554e9 Mon Sep 17 00:00:00 2001 From: Taras Martyniuk Date: Thu, 26 Sep 2024 15:36:58 +0300 Subject: [PATCH 348/762] terraform: Bump to v0.1.1 (#18382) This PR bumps the Terraform extension to v0.1.1 - https://github.com/zed-industries/zed/pull/17200 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/terraform/Cargo.toml | 2 +- extensions/terraform/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0b3ee53e9aa852..5138d59e277280 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14687,7 +14687,7 @@ dependencies = [ [[package]] name = "zed_terraform" -version = "0.1.0" +version = "0.1.1" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/terraform/Cargo.toml b/extensions/terraform/Cargo.toml index 7892b68466cc6e..56ae621e167efc 100644 --- a/extensions/terraform/Cargo.toml +++ b/extensions/terraform/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_terraform" -version = "0.1.0" +version = "0.1.1" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/terraform/extension.toml b/extensions/terraform/extension.toml index 80fe03fc04431e..fc96f773e9b238 100644 --- a/extensions/terraform/extension.toml +++ b/extensions/terraform/extension.toml @@ -1,7 +1,7 @@ id = "terraform" name = "Terraform" description = "Terraform support." 
-version = "0.1.0" +version = "0.1.1" schema_version = 1 authors = ["Caius Durling ", "Daniel Banck "] repository = "https://github.com/zed-industries/zed" From 1a4f9b289130593d77db0759be738624203731e1 Mon Sep 17 00:00:00 2001 From: Galen Elias Date: Thu, 26 Sep 2024 06:30:06 -0700 Subject: [PATCH 349/762] Fix minimum gutter line number spacing (#18021) I was inspecting how Zed did the layout in the editor, specifically for the gutter, and noticed that `em_width * X` is being used as the 'width of X consecutive characters'. However, that math didn't work for me, because em_width doesn't account for the space between characters, so you can't just multiply it by a character count. One place this is actually noticeable is in the logic for `min_width_for_number_on_gutter`, where we try to reserve 4 characters of line number space. However, once you actually hit 4 characters, the actual width is bigger, causing things to resize. This seems clearly counter to the intent of the code. It seems the more correct logic is to use `em_advance` which accounts for the space between the characters. I am leaving the rest of the uses of `em_width` for generic padding. It is also possible that `column_pixels()` would be the more correct fix here, but it wasn't straightforward to use that due to it residing in the EditorElement source file. On my MacBook this increases the width of the gutter by 6 pixels when there are <999 lines in the file, otherwise it's identical. It might be worth doing some more general audit of some of the other uses of em_width as a concept. (e.g. 
`git_blame_entries_width`) https://github.com/user-attachments/assets/f2a28cd5-9bb6-4109-bf41-1838e56a75f9 Release Notes: - Fix a slight gutter flicker when going over 999 lines --- crates/editor/src/editor.rs | 3 ++- crates/editor/src/element.rs | 16 ++++++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 730482b123bf77..102e94f1abb41e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12956,6 +12956,7 @@ impl EditorSnapshot { font_id: FontId, font_size: Pixels, em_width: Pixels, + em_advance: Pixels, max_line_number_width: Pixels, cx: &AppContext, ) -> GutterDimensions { @@ -12976,7 +12977,7 @@ impl EditorSnapshot { .unwrap_or(gutter_settings.line_numbers); let line_gutter_width = if show_line_numbers { // Avoid flicker-like gutter resizes when the line number gains another digit and only resize the gutter on files with N*10^5 lines. - let min_width_for_number_on_gutter = em_width * 4.0; + let min_width_for_number_on_gutter = em_advance * 4.0; max_line_number_width.max(min_width_for_number_on_gutter) } else { 0.0.into() diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 9fe05bc4f26063..f5db7b94ba8ad5 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -4970,6 +4970,7 @@ impl Element for EditorElement { font_id, font_size, em_width, + em_advance, self.max_line_number_width(&snapshot, cx), cx, ); @@ -6283,10 +6284,21 @@ fn compute_auto_height_layout( .unwrap() .size .width; + let em_advance = cx + .text_system() + .advance(font_id, font_size, 'm') + .unwrap() + .width; let mut snapshot = editor.snapshot(cx); - let gutter_dimensions = - snapshot.gutter_dimensions(font_id, font_size, em_width, max_line_number_width, cx); + let gutter_dimensions = snapshot.gutter_dimensions( + font_id, + font_size, + em_width, + em_advance, + max_line_number_width, + cx, + ); editor.gutter_dimensions = 
gutter_dimensions; let text_width = width - gutter_dimensions.width; From 7eea1a6f51b11bd56150203809f21da7e8b5530d Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 26 Sep 2024 15:47:14 +0200 Subject: [PATCH 350/762] git blame gutter: Use smallest possible space (#18145) Before: ![screenshot-2024-09-26-15 00 20@2x](https://github.com/user-attachments/assets/f6706325-5bef-404e-a0b4-63a5121969fa) After: ![screenshot-2024-09-26-15 02 24@2x](https://github.com/user-attachments/assets/739d0831-0b4a-457f-917e-10f3a662e74d) Release Notes: - Improved the git blame gutter to take up only the space required to display the longest git author name in the current file. --------- Co-authored-by: Bennet Bo Fenner --- crates/editor/src/editor.rs | 35 ++++++++++++++++++++++++++++------ crates/editor/src/element.rs | 31 +++++++++++++++--------------- crates/editor/src/git/blame.rs | 21 ++++++++++++++++++++ 3 files changed, 65 insertions(+), 22 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 102e94f1abb41e..54d23a8219d4f4 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -663,7 +663,7 @@ pub struct EditorSnapshot { show_git_diff_gutter: Option, show_code_actions: Option, show_runnables: Option, - render_git_blame_gutter: bool, + git_blame_gutter_max_author_length: Option, pub display_snapshot: DisplaySnapshot, pub placeholder_text: Option>, is_focused: bool, @@ -673,7 +673,7 @@ pub struct EditorSnapshot { gutter_hovered: bool, } -const GIT_BLAME_GUTTER_WIDTH_CHARS: f32 = 53.; +const GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED: usize = 20; #[derive(Default, Debug, Clone, Copy)] pub struct GutterDimensions { @@ -2211,6 +2211,19 @@ impl Editor { } pub fn snapshot(&mut self, cx: &mut WindowContext) -> EditorSnapshot { + let git_blame_gutter_max_author_length = self + .render_git_blame_gutter(cx) + .then(|| { + if let Some(blame) = self.blame.as_ref() { + let max_author_length = + blame.update(cx, |blame, cx| 
blame.max_author_length(cx)); + Some(max_author_length) + } else { + None + } + }) + .flatten(); + EditorSnapshot { mode: self.mode, show_gutter: self.show_gutter, @@ -2218,7 +2231,7 @@ impl Editor { show_git_diff_gutter: self.show_git_diff_gutter, show_code_actions: self.show_code_actions, show_runnables: self.show_runnables, - render_git_blame_gutter: self.render_git_blame_gutter(cx), + git_blame_gutter_max_author_length, display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)), scroll_anchor: self.scroll_manager.anchor(), ongoing_scroll: self.scroll_manager.ongoing_scroll(), @@ -12989,9 +13002,19 @@ impl EditorSnapshot { let show_runnables = self.show_runnables.unwrap_or(gutter_settings.runnables); - let git_blame_entries_width = self - .render_git_blame_gutter - .then_some(em_width * GIT_BLAME_GUTTER_WIDTH_CHARS); + let git_blame_entries_width = + self.git_blame_gutter_max_author_length + .map(|max_author_length| { + // Length of the author name, but also space for the commit hash, + // the spacing and the timestamp. 
+ let max_char_count = max_author_length + .min(GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED) + + 7 // length of commit sha + + 14 // length of max relative timestamp ("60 minutes ago") + + 4; // gaps and margins + + em_advance * max_char_count + }); let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO); left_padding += if show_code_actions || show_runnables { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index f5db7b94ba8ad5..6f30062d47ec77 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -21,7 +21,7 @@ use crate::{ EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, HoveredHunk, LineDown, LineUp, OpenExcerpts, PageDown, PageUp, Point, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, ToPoint, - CURSORS_VISIBLE_FOR, MAX_LINE_LEN, + CURSORS_VISIBLE_FOR, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED, MAX_LINE_LEN, }; use client::ParticipantIndex; use collections::{BTreeMap, HashMap}; @@ -1445,7 +1445,7 @@ impl EditorElement { AvailableSpace::MaxContent }; let scroll_top = scroll_position.y * line_height; - let start_x = em_width * 1; + let start_x = em_width; let mut last_used_color: Option<(PlayerColor, Oid)> = None; @@ -4228,7 +4228,7 @@ fn render_blame_entry( let short_commit_id = blame_entry.sha.display_short(); let author_name = blame_entry.author.as_deref().unwrap_or(""); - let name = util::truncate_and_trailoff(author_name, 20); + let name = util::truncate_and_trailoff(author_name, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED); let details = blame.read(cx).details_for_entry(&blame_entry); @@ -4240,22 +4240,21 @@ fn render_blame_entry( h_flex() .w_full() + .justify_between() .font_family(style.text.font().family) .line_height(style.text.line_height) .id(("blame", ix)) - .children([ - div() - .text_color(sha_color.cursor) - .child(short_commit_id) - .mr_2(), - div() - .w_full() - .h_flex() - .justify_between() - 
.text_color(cx.theme().status().hint) - .child(name) - .child(relative_timestamp), - ]) + .text_color(cx.theme().status().hint) + .pr_2() + .gap_2() + .child( + h_flex() + .items_center() + .gap_2() + .child(div().text_color(sha_color.cursor).child(short_commit_id)) + .child(name), + ) + .child(relative_timestamp) .on_mouse_down(MouseButton::Right, { let blame_entry = blame_entry.clone(); let details = details.clone(); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 775cbcc379e128..733d42d0c57dda 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -207,6 +207,27 @@ impl GitBlame { }) } + pub fn max_author_length(&mut self, cx: &mut ModelContext) -> usize { + self.sync(cx); + + let mut max_author_length = 0; + + for entry in self.entries.iter() { + let author_len = entry + .blame + .as_ref() + .and_then(|entry| entry.author.as_ref()) + .map(|author| author.len()); + if let Some(author_len) = author_len { + if author_len > max_author_length { + max_author_length = author_len; + } + } + } + + max_author_length + } + pub fn blur(&mut self, _: &mut ModelContext) { self.focused = false; } From f143396825c89b96076087ff80630d0b50eeb6cb Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 26 Sep 2024 16:24:11 +0200 Subject: [PATCH 351/762] ssh: Do not require user to be signed in to use ssh remoting (#18396) Fixes #18392 Closes #18392 Release Notes: - N/A --- crates/recent_projects/src/dev_servers.rs | 127 +++++++++------------- 1 file changed, 51 insertions(+), 76 deletions(-) diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index af5f51f14fca2c..2038d069b4c68a 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -40,7 +40,6 @@ use ui::{ }; use ui_input::{FieldLabelLayout, TextField}; use util::ResultExt; -use workspace::notifications::NotifyResultExt; use 
workspace::OpenOptions; use workspace::{notifications::DetachAndPromptErr, AppState, ModalView, Workspace, WORKSPACE_DB}; @@ -1133,7 +1132,8 @@ impl DevServerProjects { let dev_server_id = state.dev_server_id; let access_token = state.access_token.clone(); let ssh_prompt = state.ssh_prompt.clone(); - let use_direct_ssh = SshSettings::get_global(cx).use_direct_ssh(); + let use_direct_ssh = SshSettings::get_global(cx).use_direct_ssh() + || Client::global(cx).status().borrow().is_signed_out(); let mut kind = state.kind; if use_direct_ssh && kind == NewServerKind::LegacySSH { @@ -1407,7 +1407,6 @@ impl DevServerProjects { is_creating = Some(*creating); creating_dev_server = Some(*dev_server_id); }; - let is_signed_out = Client::global(cx).status().borrow().is_signed_out(); Modal::new("remote-projects", Some(self.scroll_handle.clone())) .header( @@ -1415,82 +1414,58 @@ impl DevServerProjects { .show_dismiss_button(true) .child(Headline::new("Remote Projects (alpha)").size(HeadlineSize::Small)), ) - .when(is_signed_out, |modal| { - modal - .section(Section::new().child(div().child(Label::new( - "To continue with the remote development features, you need to sign in to Zed.", - )))) - .footer( - ModalFooter::new().end_slot( - Button::new("sign_in", "Sign in with GitHub") - .icon(IconName::Github) - .icon_position(IconPosition::Start) - .full_width() - .on_click(cx.listener(|_, _, cx| { - let client = Client::global(cx).clone(); - cx.spawn(|_, mut cx| async move { - client - .authenticate_and_connect(true, &cx) - .await - .notify_async_err(&mut cx); - }) - .detach(); - cx.emit(gpui::DismissEvent); - })), - ), - ) - }) - .when(!is_signed_out, |modal| { - modal.section( - Section::new().child( - div().child( - List::new() - .empty_message("No dev servers registered yet.") - .header(Some( - ListHeader::new("Connections").end_slot( - Button::new("register-dev-server-button", "Connect New Server") - .icon(IconName::Plus) - .icon_position(IconPosition::Start) - 
.icon_color(Color::Muted) - .on_click(cx.listener(|this, _, cx| { - this.mode = Mode::CreateDevServer( - CreateDevServer { - kind: if SshSettings::get_global(cx).use_direct_ssh() { NewServerKind::DirectSSH } else { NewServerKind::LegacySSH }, - ..Default::default() - } - ); - this.dev_server_name_input.update( - cx, - |text_field, cx| { - text_field.editor().update( - cx, - |editor, cx| { - editor.set_text("", cx); - }, - ); - }, - ); - cx.notify(); - })), - ), - )) - .children(ssh_connections.iter().cloned().enumerate().map(|(ix, connection)| { + .section( + Section::new().child( + div().child( + List::new() + .empty_message("No dev servers registered yet.") + .header(Some( + ListHeader::new("Connections").end_slot( + Button::new("register-dev-server-button", "Connect New Server") + .icon(IconName::Plus) + .icon_position(IconPosition::Start) + .icon_color(Color::Muted) + .on_click(cx.listener(|this, _, cx| { + this.mode = Mode::CreateDevServer(CreateDevServer { + kind: if SshSettings::get_global(cx) + .use_direct_ssh() + { + NewServerKind::DirectSSH + } else { + NewServerKind::LegacySSH + }, + ..Default::default() + }); + this.dev_server_name_input.update( + cx, + |text_field, cx| { + text_field.editor().update(cx, |editor, cx| { + editor.set_text("", cx); + }); + }, + ); + cx.notify(); + })), + ), + )) + .children(ssh_connections.iter().cloned().enumerate().map( + |(ix, connection)| { self.render_ssh_connection(ix, connection, cx) .into_any_element() - })) - .children(dev_servers.iter().map(|dev_server| { - let creating = if creating_dev_server == Some(dev_server.id) { - is_creating - } else { - None - }; - self.render_dev_server(dev_server, creating, cx) - .into_any_element() - })), - ), + }, + )) + .children(dev_servers.iter().map(|dev_server| { + let creating = if creating_dev_server == Some(dev_server.id) { + is_creating + } else { + None + }; + self.render_dev_server(dev_server, creating, cx) + .into_any_element() + })), ), - ) - }) + ), + ) } } From 
de1889d6a864a1add60d63d08dd5d293f74a340d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 08:49:50 -0600 Subject: [PATCH 352/762] Update Rust crate async-trait to v0.1.83 (#18364) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [async-trait](https://redirect.github.com/dtolnay/async-trait) | workspace.dependencies | patch | `0.1.82` -> `0.1.83` | --- ### Release Notes
dtolnay/async-trait (async-trait) ### [`v0.1.83`](https://redirect.github.com/dtolnay/async-trait/releases/tag/0.1.83) [Compare Source](https://redirect.github.com/dtolnay/async-trait/compare/0.1.82...0.1.83) - Prevent needless_arbitrary_self_type lint being produced in generated code ([#​278](https://redirect.github.com/dtolnay/async-trait/issues/278))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5138d59e277280..4826b312f103eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -895,9 +895,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.82" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", From 82eb753b31426da8d5b9aacdadf0512678f7b1d5 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 11:24:01 -0400 Subject: [PATCH 353/762] Update actions/setup-node digest to 0a44ba7 (#18357) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [actions/setup-node](https://redirect.github.com/actions/setup-node) | action | digest | `1e60f62` -> `0a44ba7` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. 
♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/actions/run_tests/action.yml | 2 +- .github/workflows/ci.yml | 2 +- .github/workflows/danger.yml | 2 +- .github/workflows/randomized_tests.yml | 2 +- .github/workflows/release_nightly.yml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/actions/run_tests/action.yml b/.github/actions/run_tests/action.yml index 815953398ba5b5..07284e2f5854ac 100644 --- a/.github/actions/run_tests/action.yml +++ b/.github/actions/run_tests/action.yml @@ -10,7 +10,7 @@ runs: cargo install cargo-nextest - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f059b470040129..07e5499d5eb76c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -172,7 +172,7 @@ jobs: DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} steps: - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" diff --git a/.github/workflows/danger.yml b/.github/workflows/danger.yml index 8ff35b9e26da59..0278bbce02154b 100644 --- a/.github/workflows/danger.yml +++ b/.github/workflows/danger.yml @@ -21,7 +21,7 @@ jobs: version: 9 - name: Setup Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "20" cache: "pnpm" diff --git 
a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml index 57f43d4961f3dd..947b5059bd7120 100644 --- a/.github/workflows/randomized_tests.yml +++ b/.github/workflows/randomized_tests.yml @@ -22,7 +22,7 @@ jobs: - buildjet-16vcpu-ubuntu-2204 steps: - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 2b973dcddc3d6f..4e8a257bdd1bb8 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -70,7 +70,7 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Install Node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4 with: node-version: "18" From e5bbd378a61ed719a8635c49bafdfb103d1b33d8 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 11:44:38 -0400 Subject: [PATCH 354/762] Update Rust crate cargo_toml to v0.20.5 (#18365) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [cargo_toml](https://lib.rs/cargo_toml) ([source](https://gitlab.com/lib.rs/cargo_toml)) | workspace.dependencies | patch | `0.20.4` -> `0.20.5` | --- ### Release Notes
lib.rs/cargo_toml (cargo_toml) ### [`v0.20.5`](https://gitlab.com/lib.rs/cargo_toml/compare/v0.20.4...v0.20.5) [Compare Source](https://gitlab.com/lib.rs/cargo_toml/compare/v0.20.4...v0.20.5)
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4826b312f103eb..94b8205329d344 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2087,9 +2087,9 @@ dependencies = [ [[package]] name = "cargo_toml" -version = "0.20.4" +version = "0.20.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad639525b1c67b6a298f378417b060fbc04618bea559482a8484381cce27d965" +checksum = "88da5a13c620b4ca0078845707ea9c3faf11edbc3ffd8497d11d686211cd1ac0" dependencies = [ "serde", "toml 0.8.19", From 84a6ded657ecf8ced4fce6ad0e6485200a2a4c47 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 11:52:12 -0400 Subject: [PATCH 355/762] Update Rust crate clap to v4.5.18 (#18369) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.17` -> `4.5.18` | --- ### Release Notes
clap-rs/clap (clap) ### [`v4.5.18`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4518---2024-09-20) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.17...v4.5.18) ##### Features - *(builder)* Expose `Arg::get_display_order` and `Command::get_display_order`
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 94b8205329d344..7cd40d4226d636 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2283,9 +2283,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.17" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" +checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3" dependencies = [ "clap_builder", "clap_derive", @@ -2293,9 +2293,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.17" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" +checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b" dependencies = [ "anstream", "anstyle", @@ -2315,9 +2315,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.13" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ "heck 0.5.0", "proc-macro2", @@ -6478,7 +6478,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -13536,7 +13536,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] From c7a79cfc02bcacfb3983a9d8b3ad15bb06d33380 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 12:16:49 -0400 Subject: [PATCH 356/762] Update Rust crate libc to v0.2.159 (#18370) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [libc](https://redirect.github.com/rust-lang/libc) | workspace.dependencies | patch | `0.2.158` -> `0.2.159` | --- ### Release Notes
rust-lang/libc (libc) ### [`v0.2.159`](https://redirect.github.com/rust-lang/libc/releases/tag/0.2.159) [Compare Source](https://redirect.github.com/rust-lang/libc/compare/0.2.158...0.2.159) ##### Added - Android: add more `AT_*` constants in [#​3779](https://redirect.github.com/rust-lang/libc/pull/3779) - Apple: add missing `NOTE_*` constants in [#​3883](https://redirect.github.com/rust-lang/libc/pull/3883) - Hermit: add missing error numbers in [#​3858](https://redirect.github.com/rust-lang/libc/pull/3858) - Hurd: add `__timeval` for 64-bit support in [#​3786](https://redirect.github.com/rust-lang/libc/pull/3786) - Linux: add `epoll_pwait2` in [#​3868](https://redirect.github.com/rust-lang/libc/pull/3868) - Linux: add `mq_notify` in [#​3849](https://redirect.github.com/rust-lang/libc/pull/3849) - Linux: add missing `NFT_CT_*` constants in [#​3844](https://redirect.github.com/rust-lang/libc/pull/3844) - Linux: add the `fchmodat2` syscall in [#​3588](https://redirect.github.com/rust-lang/libc/pull/3588) - Linux: add the `mseal` syscall in [#​3798](https://redirect.github.com/rust-lang/libc/pull/3798) - OpenBSD: add `sendmmsg` and `recvmmsg` in [#​3831](https://redirect.github.com/rust-lang/libc/pull/3831) - Unix: add `IN6ADDR_ANY_INIT` and `IN6ADDR_LOOPBACK_INIT` in [#​3693](https://redirect.github.com/rust-lang/libc/pull/3693) - VxWorks: add `S_ISVTX` in [#​3768](https://redirect.github.com/rust-lang/libc/pull/3768) - VxWorks: add `vxCpuLib` and `taskLib` functions [#​3861](https://redirect.github.com/rust-lang/libc/pull/3861) - WASIp2: add definitions for `std::net` support in [#​3892](https://redirect.github.com/rust-lang/libc/pull/3892) ##### Fixed - Correctly handle version checks when `clippy-driver` is used [#​3893](https://redirect.github.com/rust-lang/libc/pull/3893) ##### Changed - EspIdf: change signal constants to c_int in [#​3895](https://redirect.github.com/rust-lang/libc/pull/3895) - HorizonOS: update network definitions in 
[#​3863](https://redirect.github.com/rust-lang/libc/pull/3863) - Linux: combine `ioctl` APIs in [#​3722](https://redirect.github.com/rust-lang/libc/pull/3722) - WASI: enable CI testing in [#​3869](https://redirect.github.com/rust-lang/libc/pull/3869) - WASIp2: enable CI testing in [#​3870](https://redirect.github.com/rust-lang/libc/pull/3870)
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7cd40d4226d636..68dad1f74613f1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6434,9 +6434,9 @@ checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" [[package]] name = "libc" -version = "0.2.158" +version = "0.2.159" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" [[package]] name = "libdbus-sys" From 11058765bec3bb1e0510254904276d29d4fc31f1 Mon Sep 17 00:00:00 2001 From: thataboy Date: Thu, 26 Sep 2024 09:48:23 -0700 Subject: [PATCH 357/762] Add ability to separately set background color for highlighted brackets (#17566) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/zed-industries/zed/issues/16380 Currently brackets are highlighted with `editor.document_highlight.read_background`. This commit adds a separate `editor.document_highlight.bracket_background` theme setting so bracket highlights can be made more prominent without doing the same to other highlights, making the display too busy. 
(My own theme) https://github.com/user-attachments/assets/29a8c05e-2f1a-4c16-9be8-a4b4cb143548 I set defaults for light and dark theme that I hope are sensible and not too obnoxious, but noticeable so people can change it if they don't like it. Release Notes: - Added `editor.document_highlight.bracket_background` field to the theme to set background color of highlighted brackets. - This will fall back to `editor.document_highlight.read_background`, if not set. Screenshot 2024-09-08 at 8 46 57 AM Screenshot 2024-09-08 at 9 03 27 AM --------- Co-authored-by: Marshall Bowers --- .../editor/src/highlight_matching_bracket.rs | 2 +- crates/theme/src/default_colors.rs | 2 ++ crates/theme/src/one_themes.rs | 1 + crates/theme/src/schema.rs | 21 +++++++++++++++---- crates/theme/src/styles/colors.rs | 4 ++++ 5 files changed, 25 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/highlight_matching_bracket.rs b/crates/editor/src/highlight_matching_bracket.rs index 67915d4d7b4990..f63b363f34f475 100644 --- a/crates/editor/src/highlight_matching_bracket.rs +++ b/crates/editor/src/highlight_matching_bracket.rs @@ -32,7 +32,7 @@ pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewCon opening_range.to_anchors(&snapshot.buffer_snapshot), closing_range.to_anchors(&snapshot.buffer_snapshot), ], - |theme| theme.editor_document_highlight_read_background, + |theme| theme.editor_document_highlight_bracket_background, cx, ) } diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index 4def0bb8d74d62..a7521bd374d1c3 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -80,6 +80,7 @@ impl ThemeColors { editor_indent_guide_active: neutral().light_alpha().step_6(), editor_document_highlight_read_background: neutral().light_alpha().step_3(), editor_document_highlight_write_background: neutral().light_alpha().step_4(), + editor_document_highlight_bracket_background: 
green().light_alpha().step_5(), terminal_background: neutral().light().step_1(), terminal_foreground: black().light().step_12(), terminal_bright_foreground: black().light().step_11(), @@ -179,6 +180,7 @@ impl ThemeColors { editor_indent_guide_active: neutral().dark_alpha().step_6(), editor_document_highlight_read_background: neutral().dark_alpha().step_4(), editor_document_highlight_write_background: neutral().dark_alpha().step_4(), + editor_document_highlight_bracket_background: green().dark_alpha().step_6(), terminal_background: neutral().dark().step_1(), terminal_ansi_background: neutral().dark().step_1(), terminal_foreground: white().dark().step_12(), diff --git a/crates/theme/src/one_themes.rs b/crates/theme/src/one_themes.rs index 69e69ce23dc8d1..50a4184e8bc934 100644 --- a/crates/theme/src/one_themes.rs +++ b/crates/theme/src/one_themes.rs @@ -102,6 +102,7 @@ pub(crate) fn one_dark() -> Theme { 0.2, ), editor_document_highlight_write_background: gpui::red(), + editor_document_highlight_bracket_background: gpui::green(), terminal_background: bg, // todo("Use one colors for terminal") diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index 0229b1ea98d590..91863061236f24 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -413,6 +413,12 @@ pub struct ThemeColorsContent { #[serde(rename = "editor.document_highlight.write_background")] pub editor_document_highlight_write_background: Option, + /// Highlighted brackets background color. + /// + /// Matching brackets in the cursor scope are highlighted with this background color. + #[serde(rename = "editor.document_highlight.bracket_background")] + pub editor_document_highlight_bracket_background: Option, + /// Terminal background color. 
#[serde(rename = "terminal.background")] pub terminal_background: Option, @@ -540,6 +546,10 @@ impl ThemeColorsContent { .border .as_ref() .and_then(|color| try_parse_color(color).ok()); + let editor_document_highlight_read_background = self + .editor_document_highlight_read_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()); ThemeColorsRefinement { border, border_variant: self @@ -784,14 +794,17 @@ impl ThemeColorsContent { .editor_indent_guide_active .as_ref() .and_then(|color| try_parse_color(color).ok()), - editor_document_highlight_read_background: self - .editor_document_highlight_read_background - .as_ref() - .and_then(|color| try_parse_color(color).ok()), + editor_document_highlight_read_background, editor_document_highlight_write_background: self .editor_document_highlight_write_background .as_ref() .and_then(|color| try_parse_color(color).ok()), + editor_document_highlight_bracket_background: self + .editor_document_highlight_bracket_background + .as_ref() + .and_then(|color| try_parse_color(color).ok()) + // Fall back to `editor.document_highlight.read_background`, for backwards compatibility. + .or(editor_document_highlight_read_background), terminal_background: self .terminal_background .as_ref() diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 0b37be09923c79..225275f37b6191 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -171,6 +171,10 @@ pub struct ThemeColors { /// special attention. Usually a document highlight is visualized by changing /// the background color of its range. pub editor_document_highlight_write_background: Hsla, + /// Highlighted brackets background color. + /// + /// Matching brackets in the cursor scope are highlighted with this background color. 
+ pub editor_document_highlight_bracket_background: Hsla, // === // Terminal From 71da81c74326f1f9763803a6d1cd776b48b58125 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 26 Sep 2024 12:03:57 -0700 Subject: [PATCH 358/762] SSH Remoting: Fix bugs in worktree syncing (#18406) Release Notes: - N/A --------- Co-authored-by: conrad --- crates/collab/src/db/ids.rs | 1 + crates/collab/src/db/queries/projects.rs | 2 +- .../collab/src/tests/channel_buffer_tests.rs | 2 +- crates/collab/src/tests/editor_tests.rs | 30 ++++---- crates/collab/src/tests/following_tests.rs | 16 ++-- crates/collab/src/tests/integration_tests.rs | 74 +++++++++---------- .../remote_editing_collaboration_tests.rs | 25 ++++++- crates/collab/src/tests/test_server.rs | 2 +- crates/project/src/worktree_store.rs | 32 ++++---- .../remote_server/src/remote_editing_tests.rs | 42 +++++++++++ crates/worktree/src/worktree.rs | 7 +- script/zed-local | 16 ++-- 12 files changed, 157 insertions(+), 92 deletions(-) diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs index 1434bc07cf6c37..9bf767329d0024 100644 --- a/crates/collab/src/db/ids.rs +++ b/crates/collab/src/db/ids.rs @@ -32,6 +32,7 @@ macro_rules! id_type { #[allow(unused)] #[allow(missing_docs)] pub fn from_proto(value: u64) -> Self { + debug_assert!(value != 0); Self(value as i32) } diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index b514d4bb03601b..8091c6620570f2 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -285,7 +285,7 @@ impl Database { ) .one(&*tx) .await? - .ok_or_else(|| anyhow!("no such project"))?; + .ok_or_else(|| anyhow!("no such project: {project_id}"))?; // Update metadata. 
worktree::Entity::update(worktree::ActiveModel { diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index 1ba41c45bb6068..b5bfd0f03b9ec7 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -246,7 +246,7 @@ async fn test_channel_notes_participant_indices( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); // Clients A and B open the same file. diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index d2835edc619e20..f9bc21efb1abdf 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -76,7 +76,7 @@ async fn test_host_disconnect( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; cx_a.background_executor.run_until_parked(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); @@ -192,7 +192,7 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client A let buffer_a = project_a @@ -308,7 +308,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a file in an editor as the guest. 
let buffer_b = project_b @@ -565,7 +565,7 @@ async fn test_collaborating_with_code_actions( .unwrap(); // Join the project as client B. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update(cx_b, |workspace, cx| { @@ -780,7 +780,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b @@ -1030,7 +1030,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes .await .unwrap(); executor.run_until_parked(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; project_b.read_with(cx_b, |project, cx| { let status = project.language_server_statuses(cx).next().unwrap().1; @@ -1126,9 +1126,7 @@ async fn test_share_project( .await .unwrap(); let client_b_peer_id = client_b.peer_id().unwrap(); - let project_b = client_b - .build_dev_server_project(initial_project.id, cx_b) - .await; + let project_b = client_b.join_remote_project(initial_project.id, cx_b).await; let replica_id_b = project_b.read_with(cx_b, |project, _| project.replica_id()); @@ -1230,9 +1228,7 @@ async fn test_share_project( .update(cx_c, |call, cx| call.accept_incoming(cx)) .await .unwrap(); - let _project_c = client_c - .build_dev_server_project(initial_project.id, cx_c) - .await; + let _project_c = client_c.join_remote_project(initial_project.id, cx_c).await; // Client B closes the editor, and client A sees client B's selections 
removed. cx_b.update(move |_| drop(editor_b)); @@ -1291,7 +1287,7 @@ async fn test_on_input_format_from_host_to_guest( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a file in an editor as the host. let buffer_a = project_a @@ -1411,7 +1407,7 @@ async fn test_on_input_format_from_guest_to_host( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a file in an editor as the guest. let buffer_b = project_b @@ -1574,7 +1570,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( .unwrap(); // Client B joins the project - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1836,7 +1832,7 @@ async fn test_inlay_hint_refresh_is_forwarded( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -2050,7 +2046,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA .unwrap(); // Join the project as client B. 
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update(cx_b, |workspace, cx| { diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index 9a39d6f3eb2e74..5e9c001491c6ce 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -74,7 +74,7 @@ async fn test_basic_following( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -162,7 +162,7 @@ async fn test_basic_following( executor.run_until_parked(); let active_call_c = cx_c.read(ActiveCall::global); - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c); active_call_c .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx)) @@ -175,7 +175,7 @@ async fn test_basic_following( cx_d.executor().run_until_parked(); let active_call_d = cx_d.read(ActiveCall::global); - let project_d = client_d.build_dev_server_project(project_id, cx_d).await; + let project_d = client_d.join_remote_project(project_id, cx_d).await; let (workspace_d, cx_d) = client_d.build_workspace(&project_d, cx_d); active_call_d .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx)) @@ -569,7 +569,7 @@ async fn test_following_tab_order( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = 
client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -686,7 +686,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T .unwrap(); // Client B joins the project. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1199,7 +1199,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await @@ -1335,7 +1335,7 @@ async fn test_peers_simultaneously_following_each_other( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); executor.run_until_parked(); @@ -1685,7 +1685,7 @@ async fn test_following_into_excluded_file( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; active_call_b .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx)) .await diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index d5cef3589cce33..afc3e7cfb84ee2 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -1372,7 +1372,7 @@ async fn test_unshare_project( .unwrap(); let 
worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; executor.run_until_parked(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); @@ -1392,7 +1392,7 @@ async fn test_unshare_project( assert!(project_b.read_with(cx_b, |project, _| project.is_disconnected())); // Client C opens the project. - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; // When client A unshares the project, client C's project becomes read-only. project_a @@ -1409,7 +1409,7 @@ async fn test_unshare_project( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_c2 = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c2 = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); @@ -1514,9 +1514,9 @@ async fn test_project_reconnect( .await .unwrap(); - let project_b1 = client_b.build_dev_server_project(project1_id, cx_b).await; - let project_b2 = client_b.build_dev_server_project(project2_id, cx_b).await; - let project_b3 = client_b.build_dev_server_project(project3_id, cx_b).await; + let project_b1 = client_b.join_remote_project(project1_id, cx_b).await; + let project_b2 = client_b.join_remote_project(project2_id, cx_b).await; + let project_b3 = client_b.join_remote_project(project3_id, cx_b).await; executor.run_until_parked(); let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| { @@ -2310,8 +2310,8 @@ async fn test_propagate_saves_and_fs_changes( .unwrap(); // Join that worktree as clients B and C. 
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap()); @@ -2535,7 +2535,7 @@ async fn test_git_diff_base_change( .await .unwrap(); - let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + let project_remote = client_b.join_remote_project(project_id, cx_b).await; let diff_base = " one @@ -2791,7 +2791,7 @@ async fn test_git_branch_name( .await .unwrap(); - let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + let project_remote = client_b.join_remote_project(project_id, cx_b).await; client_a .fs() .set_branch_name(Path::new("/dir/.git"), Some("branch-1")); @@ -2836,7 +2836,7 @@ async fn test_git_branch_name( assert_branch(Some("branch-2"), project, cx) }); - let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_remote_c = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); project_remote_c.read_with(cx_c, |project, cx| { @@ -2891,7 +2891,7 @@ async fn test_git_status_sync( .await .unwrap(); - let project_remote = client_b.build_dev_server_project(project_id, cx_b).await; + let project_remote = client_b.join_remote_project(project_id, cx_b).await; // Wait for it to catch up to the new status executor.run_until_parked(); @@ -2967,7 +2967,7 @@ async fn test_git_status_sync( }); // And synchronization while joining - let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_remote_c = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); project_remote_c.read_with(cx_c, |project, cx| { @@ -3015,7 +3015,7 @@ async fn test_fs_operations( .update(cx_a, |call, 
cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap()); @@ -3316,7 +3316,7 @@ async fn test_local_settings( executor.run_until_parked(); // As client B, join that project and observe the local settings. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap()); executor.run_until_parked(); @@ -3439,7 +3439,7 @@ async fn test_buffer_conflict_after_save( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client B let buffer_b = project_b @@ -3503,7 +3503,7 @@ async fn test_buffer_reloading( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client B let buffer_b = project_b @@ -3557,7 +3557,7 @@ async fn test_editing_while_guest_opens_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open a buffer as client A let buffer_a = project_a @@ -3605,7 +3605,7 @@ async fn test_leaving_worktree_while_opening_buffer( .update(cx_a, |call, cx| 
call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // See that a guest has joined as client A. executor.run_until_parked(); @@ -3652,7 +3652,7 @@ async fn test_canceling_buffer_opening( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let buffer_a = project_a .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) @@ -3709,8 +3709,8 @@ async fn test_leaving_project( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b1 = client_b.build_dev_server_project(project_id, cx_b).await; - let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_b1 = client_b.join_remote_project(project_id, cx_b).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; // Client A sees that a guest has joined. executor.run_until_parked(); @@ -3751,7 +3751,7 @@ async fn test_leaving_project( }); // Client B re-joins the project and can open buffers as before. - let project_b2 = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b2 = client_b.join_remote_project(project_id, cx_b).await; executor.run_until_parked(); project_a.read_with(cx_a, |project, _| { @@ -3927,7 +3927,7 @@ async fn test_collaborating_with_diagnostics( ); // Join the worktree as client B. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Wait for server to see the diagnostics update. executor.run_until_parked(); @@ -3952,7 +3952,7 @@ async fn test_collaborating_with_diagnostics( }); // Join project as client C and observe the diagnostics. 
- let project_c = client_c.build_dev_server_project(project_id, cx_c).await; + let project_c = client_c.join_remote_project(project_id, cx_c).await; executor.run_until_parked(); let project_c_diagnostic_summaries = Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| { @@ -4160,7 +4160,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering( .unwrap(); // Join the project as client B and open all three files. - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| { project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, file_name), cx)) })) @@ -4266,7 +4266,7 @@ async fn test_reloading_buffer_manually( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); @@ -4364,7 +4364,7 @@ async fn test_formatting_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); @@ -4486,7 +4486,7 @@ async fn test_prettier_formatting_buffer( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)); let 
buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); @@ -4599,7 +4599,7 @@ async fn test_definition( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file on client B. let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); @@ -4744,7 +4744,7 @@ async fn test_references( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file on client B. let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx)); @@ -4901,7 +4901,7 @@ async fn test_project_search( .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Perform a search as the guest. let mut results = HashMap::default(); @@ -4991,7 +4991,7 @@ async fn test_document_highlights( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file on client B. 
let open_b = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)); @@ -5109,7 +5109,7 @@ async fn test_lsp_hover( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Open the file as the guest let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)); @@ -5286,7 +5286,7 @@ async fn test_project_symbols( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Cause the language server to start. let open_buffer_task = @@ -5381,7 +5381,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let open_buffer_task = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); let buffer_b1 = cx_b.executor().spawn(open_buffer_task).await.unwrap(); @@ -6470,7 +6470,7 @@ async fn test_context_collaboration_with_reconnect( .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await .unwrap(); - let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; // Client A sees that a guest has joined. 
executor.run_until_parked(); diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index bad5ef9053ce70..a9cc32c1dd3a67 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -9,7 +9,7 @@ use remote_server::HeadlessProject; use serde_json::json; use std::{path::Path, sync::Arc}; -#[gpui::test] +#[gpui::test(iterations = 10)] async fn test_sharing_an_ssh_remote_project( cx_a: &mut TestAppContext, cx_b: &mut TestAppContext, @@ -54,9 +54,8 @@ async fn test_sharing_an_ssh_remote_project( let (project_a, worktree_id) = client_a .build_ssh_project("/code/project1", client_ssh, cx_a) .await; - executor.run_until_parked(); - // User A shares the remote project. + // While the SSH worktree is being scanned, user A shares the remote project. let active_call_a = cx_a.read(ActiveCall::global); let project_id = active_call_a .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) @@ -64,12 +63,30 @@ async fn test_sharing_an_ssh_remote_project( .unwrap(); // User B joins the project. 
- let project_b = client_b.build_dev_server_project(project_id, cx_b).await; + let project_b = client_b.join_remote_project(project_id, cx_b).await; let worktree_b = project_b .update(cx_b, |project, cx| project.worktree_for_id(worktree_id, cx)) .unwrap(); + let worktree_a = project_a + .update(cx_a, |project, cx| project.worktree_for_id(worktree_id, cx)) + .unwrap(); + executor.run_until_parked(); + + worktree_a.update(cx_a, |worktree, _cx| { + assert_eq!( + worktree.paths().map(Arc::as_ref).collect::>(), + vec![ + Path::new(".zed"), + Path::new(".zed/settings.json"), + Path::new("README.md"), + Path::new("src"), + Path::new("src/lib.rs"), + ] + ); + }); + worktree_b.update(cx_b, |worktree, _cx| { assert_eq!( worktree.paths().map(Arc::as_ref).collect::>(), diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 6f07d76b0b26b0..94c7d3907ff4ff 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -921,7 +921,7 @@ impl TestClient { }) } - pub async fn build_dev_server_project( + pub async fn join_remote_project( &self, host_project_id: u64, guest_cx: &mut TestAppContext, diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index e445eab2dd6392..1fc04a0d0b4f44 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -204,8 +204,11 @@ impl WorktreeStore { self.loading_worktrees.insert(path.clone(), task.shared()); } let task = self.loading_worktrees.get(&path).unwrap().clone(); - cx.background_executor().spawn(async move { - match task.await { + cx.spawn(|this, mut cx| async move { + let result = task.await; + this.update(&mut cx, |this, _| this.loading_worktrees.remove(&path)) + .ok(); + match result { Ok(worktree) => Ok(worktree), Err(err) => Err((*err).cloned()), } @@ -219,7 +222,8 @@ impl WorktreeStore { visible: bool, cx: &mut ModelContext, ) -> Task, Arc>> { - let mut abs_path = 
abs_path.as_ref().to_string_lossy().to_string(); + let path_key: Arc = abs_path.as_ref().into(); + let mut abs_path = path_key.clone().to_string_lossy().to_string(); // If we start with `/~` that means the ssh path was something like `ssh://user@host/~/home-dir-folder/` // in which case want to strip the leading the `/`. // On the host-side, the `~` will get expanded. @@ -261,8 +265,9 @@ impl WorktreeStore { ) })?; - this.update(&mut cx, |this, cx| this.add(&worktree, cx))?; - + this.update(&mut cx, |this, cx| { + this.add(&worktree, cx); + })?; Ok(worktree) }) } @@ -280,10 +285,6 @@ impl WorktreeStore { cx.spawn(move |this, mut cx| async move { let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await; - this.update(&mut cx, |project, _| { - project.loading_worktrees.remove(&path); - })?; - let worktree = worktree?; this.update(&mut cx, |this, cx| this.add(&worktree, cx))?; @@ -317,7 +318,7 @@ impl WorktreeStore { }); let abs_path = abs_path.as_ref().to_path_buf(); - cx.spawn(move |project, mut cx| async move { + cx.spawn(move |project, cx| async move { let (tx, rx) = futures::channel::oneshot::channel(); let tx = RefCell::new(Some(tx)); let Some(project) = project.upgrade() else { @@ -339,14 +340,10 @@ impl WorktreeStore { request.await?; let worktree = rx.await.map_err(|e| anyhow!(e))?; drop(observer); - project.update(&mut cx, |project, _| { - project.loading_worktrees.remove(&path); - })?; Ok(worktree) }) } - #[track_caller] pub fn add(&mut self, worktree: &Model, cx: &mut ModelContext) { let worktree_id = worktree.read(cx).id(); debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id)); @@ -553,9 +550,12 @@ impl WorktreeStore { let client = client.clone(); async move { if client.is_via_collab() { - client.request(update).map(|result| result.is_ok()).await + client + .request(update) + .map(|result| result.log_err().is_some()) + .await } else { - client.send(update).is_ok() + client.send(update).log_err().is_some() } 
} } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 084fcf9929f014..892063942754c1 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -564,6 +564,48 @@ async fn test_canceling_buffer_opening(cx: &mut TestAppContext, server_cx: &mut }); } +#[gpui::test] +async fn test_adding_then_removing_then_adding_worktrees( + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + let (project, _headless, _fs) = init_test(cx, server_cx).await; + let (_worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + + let (worktree_2, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project2", true, cx) + }) + .await + .unwrap(); + let worktree_id_2 = worktree_2.read_with(cx, |tree, _| tree.id()); + + project.update(cx, |project, cx| project.remove_worktree(worktree_id_2, cx)); + + let (worktree_2, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project2", true, cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + worktree_2.update(cx, |worktree, _cx| { + assert!(worktree.is_visible()); + let entries = worktree.entries(true, 0).collect::>(); + assert_eq!(entries.len(), 2); + assert_eq!( + entries[1].path.to_string_lossy().to_string(), + "README.md".to_string() + ) + }) +} + fn init_logger() { if std::env::var("RUST_LOG").is_ok() { env_logger::try_init().ok(); diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index f91a832b80d783..d81c91132b9d39 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -1826,10 +1826,13 @@ impl RemoteWorktree { let initial_update = self .snapshot .build_initial_update(project_id, self.id().to_proto()); - self.updates_tx = Some(tx); + self.update_observer = Some(tx); cx.spawn(|this, mut cx| 
async move { let mut update = initial_update; loop { + // SSH projects use a special project ID of 0, and we need to + // remap it to the correct one here. + update.project_id = project_id; if !callback(update).await { break; } @@ -1841,7 +1844,7 @@ impl RemoteWorktree { } this.update(&mut cx, |this, _| { let this = this.as_remote_mut().unwrap(); - this.updates_tx.take(); + this.update_observer.take(); }) }) .detach(); diff --git a/script/zed-local b/script/zed-local index c3dfb2879d175d..9ec9b24af7509d 100755 --- a/script/zed-local +++ b/script/zed-local @@ -9,12 +9,18 @@ SUMMARY Each instance of Zed will be signed in as a different user specified in either \`.admins.json\` or \`.admins.default.json\`. + All arguments after the initial options will be passed through to the first + instance of Zed. This can be used to test SSH remoting along with collab, like + so: + + $ script/zed-local -2 ssh://your-ssh-uri-here + OPTIONS - --help Print this help message - --release Build Zed in release mode - -2, -3, -4, ... Spawn multiple Zed instances, with their windows tiled. - --top Arrange the Zed windows so they take up the top half of the screen. - --stable Use stable Zed release installed on local machine for all instances (except for the first one). + --help Print this help message + --release Build Zed in release mode + -2, -3, -4, ... Spawn multiple Zed instances, with their windows tiled. + --top Arrange the Zed windows so they take up the top half of the screen. + --stable Use stable Zed release installed on local machine for all instances (except for the first one). 
`.trim(); const { spawn, execSync, execFileSync } = require("child_process"); From c1a039a5d77f4ce2f23d17ad3ffa0bebd267b620 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 26 Sep 2024 12:10:39 -0700 Subject: [PATCH 359/762] Remove old project search code path, bump min-supported zed version for collaboration (#18404) Release Notes: - N/A --- crates/collab/src/rpc.rs | 44 +------------------ crates/collab/src/rpc/connection_pool.rs | 20 +++++---- crates/project/src/project.rs | 55 +++--------------------- crates/project/src/search.rs | 37 +--------------- crates/proto/proto/zed.proto | 19 +------- crates/proto/src/proto.rs | 4 -- 6 files changed, 22 insertions(+), 157 deletions(-) diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index bc0f827e78ba51..d9683fb8b366c1 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -474,9 +474,6 @@ impl Server { .add_request_handler(user_handler( forward_read_only_project_request::, )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) .add_request_handler(user_handler(forward_find_search_candidates_request)) .add_request_handler(user_handler( forward_read_only_project_request::, @@ -2298,7 +2295,7 @@ async fn list_remote_directory( let dev_server_connection_id = session .connection_pool() .await - .dev_server_connection_id_supporting(dev_server_id, ZedVersion::with_list_directory())?; + .online_dev_server_connection_id(dev_server_id)?; session .db() @@ -2337,10 +2334,7 @@ async fn update_dev_server_project( let dev_server_connection_id = session .connection_pool() .await - .dev_server_connection_id_supporting( - dev_server_project.dev_server_id, - ZedVersion::with_list_directory(), - )?; + .online_dev_server_connection_id(dev_server_project.dev_server_id)?; session.peer.send( dev_server_connection_id, @@ -2950,40 +2944,6 @@ async fn forward_find_search_candidates_request( .await .host_for_read_only_project_request(project_id, session.connection_id, 
session.user_id()) .await?; - - let host_version = session - .connection_pool() - .await - .connection(host_connection_id) - .map(|c| c.zed_version); - - if host_version.is_some_and(|host_version| host_version < ZedVersion::with_search_candidates()) - { - let query = request.query.ok_or_else(|| anyhow!("missing query"))?; - let search = proto::SearchProject { - project_id: project_id.to_proto(), - query: query.query, - regex: query.regex, - whole_word: query.whole_word, - case_sensitive: query.case_sensitive, - files_to_include: query.files_to_include, - files_to_exclude: query.files_to_exclude, - include_ignored: query.include_ignored, - }; - - let payload = session - .peer - .forward_request(session.connection_id, host_connection_id, search) - .await?; - return response.send(proto::FindSearchCandidatesResponse { - buffer_ids: payload - .locations - .into_iter() - .map(|loc| loc.buffer_id) - .collect(), - }); - } - let payload = session .peer .forward_request(session.connection_id, host_connection_id, request) diff --git a/crates/collab/src/rpc/connection_pool.rs b/crates/collab/src/rpc/connection_pool.rs index ad0131aaa18e5f..96deefba7949c7 100644 --- a/crates/collab/src/rpc/connection_pool.rs +++ b/crates/collab/src/rpc/connection_pool.rs @@ -32,15 +32,7 @@ impl fmt::Display for ZedVersion { impl ZedVersion { pub fn can_collaborate(&self) -> bool { - self.0 >= SemanticVersion::new(0, 134, 0) - } - - pub fn with_list_directory() -> ZedVersion { - ZedVersion(SemanticVersion::new(0, 145, 0)) - } - - pub fn with_search_candidates() -> ZedVersion { - ZedVersion(SemanticVersion::new(0, 151, 0)) + self.0 >= SemanticVersion::new(0, 151, 0) } } @@ -169,6 +161,16 @@ impl ConnectionPool { self.connected_dev_servers.get(&dev_server_id).copied() } + pub fn online_dev_server_connection_id( + &self, + dev_server_id: DevServerId, + ) -> Result { + match self.connected_dev_servers.get(&dev_server_id) { + Some(cid) => Ok(*cid), + None => 
Err(anyhow!(proto::ErrorCode::DevServerOffline)), + } + } + pub fn dev_server_connection_id_supporting( &self, dev_server_id: DevServerId, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index c3b3c383c11ffd..fa373af61951be 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -558,7 +558,6 @@ impl Project { client.add_model_message_handler(Self::handle_update_worktree); client.add_model_request_handler(Self::handle_synchronize_buffers); - client.add_model_request_handler(Self::handle_search_project); client.add_model_request_handler(Self::handle_search_candidate_buffers); client.add_model_request_handler(Self::handle_open_buffer_by_id); client.add_model_request_handler(Self::handle_open_buffer_by_path); @@ -2692,9 +2691,9 @@ impl Project { let (result_tx, result_rx) = smol::channel::unbounded(); let matching_buffers_rx = if query.is_opened_only() { - self.sort_candidate_buffers(&query, cx) + self.sort_search_candidates(&query, cx) } else { - self.search_for_candidate_buffers(&query, MAX_SEARCH_RESULT_FILES + 1, cx) + self.find_search_candidate_buffers(&query, MAX_SEARCH_RESULT_FILES + 1, cx) }; cx.spawn(|_, cx| async move { @@ -2757,7 +2756,7 @@ impl Project { result_rx } - fn search_for_candidate_buffers( + fn find_search_candidate_buffers( &mut self, query: &SearchQuery, limit: usize, @@ -2769,11 +2768,11 @@ impl Project { buffer_store.find_search_candidates(query, limit, fs, cx) }) } else { - self.search_for_candidate_buffers_remote(query, limit, cx) + self.find_search_candidates_remote(query, limit, cx) } } - fn sort_candidate_buffers( + fn sort_search_candidates( &mut self, search_query: &SearchQuery, cx: &mut ModelContext, @@ -2815,7 +2814,7 @@ impl Project { rx } - fn search_for_candidate_buffers_remote( + fn find_search_candidates_remote( &mut self, query: &SearchQuery, limit: usize, @@ -3656,46 +3655,6 @@ impl Project { Ok(proto::TaskTemplatesResponse { templates }) } - async fn handle_search_project( 
- this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result { - let peer_id = envelope.original_sender_id()?; - let query = SearchQuery::from_proto_v1(envelope.payload)?; - let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?; - - cx.spawn(move |mut cx| async move { - let mut locations = Vec::new(); - let mut limit_reached = false; - while let Some(result) = result.next().await { - match result { - SearchResult::Buffer { buffer, ranges } => { - for range in ranges { - let start = serialize_anchor(&range.start); - let end = serialize_anchor(&range.end); - let buffer_id = this.update(&mut cx, |this, cx| { - this.create_buffer_for_peer(&buffer, peer_id, cx).into() - })?; - locations.push(proto::Location { - buffer_id, - start: Some(start), - end: Some(end), - }); - } - } - SearchResult::LimitReached => limit_reached = true, - } - } - Ok(proto::SearchProjectResponse { - locations, - limit_reached, - // will restart - }) - }) - .await - } - async fn handle_search_candidate_buffers( this: Model, envelope: TypedEnvelope, @@ -3709,7 +3668,7 @@ impl Project { .ok_or_else(|| anyhow!("missing query field"))?, )?; let mut results = this.update(&mut cx, |this, cx| { - this.search_for_candidate_buffers(&query, message.limit as _, cx) + this.find_search_candidate_buffers(&query, message.limit as _, cx) })?; let mut response = proto::FindSearchCandidatesResponse { diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index d0e435aa135418..4205f3173039c2 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -147,30 +147,6 @@ impl SearchQuery { }) } - pub fn from_proto_v1(message: proto::SearchProject) -> Result { - if message.regex { - Self::regex( - message.query, - message.whole_word, - message.case_sensitive, - message.include_ignored, - deserialize_path_matches(&message.files_to_include)?, - deserialize_path_matches(&message.files_to_exclude)?, - None, - ) - } else { - Self::text( - 
message.query, - message.whole_word, - message.case_sensitive, - message.include_ignored, - deserialize_path_matches(&message.files_to_include)?, - deserialize_path_matches(&message.files_to_exclude)?, - None, - ) - } - } - pub fn from_proto(message: proto::SearchQuery) -> Result { if message.regex { Self::regex( @@ -194,6 +170,7 @@ impl SearchQuery { ) } } + pub fn with_replacement(mut self, new_replacement: String) -> Self { match self { Self::Text { @@ -209,18 +186,6 @@ impl SearchQuery { } } } - pub fn to_protov1(&self, project_id: u64) -> proto::SearchProject { - proto::SearchProject { - project_id, - query: self.as_str().to_string(), - regex: self.is_regex(), - whole_word: self.whole_word(), - case_sensitive: self.case_sensitive(), - include_ignored: self.include_ignored(), - files_to_include: self.files_to_include().sources().join(","), - files_to_exclude: self.files_to_exclude().sources().join(","), - } - } pub fn to_proto(&self) -> proto::SearchQuery { proto::SearchQuery { diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index d81ef35f6bffbd..07f64557f47e1d 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -108,8 +108,6 @@ message Envelope { PrepareRenameResponse prepare_rename_response = 84; PerformRename perform_rename = 85; PerformRenameResponse perform_rename_response = 86; - SearchProject search_project = 87; - SearchProjectResponse search_project_response = 88; UpdateContacts update_contacts = 89; UpdateInviteInfo update_invite_info = 90; @@ -287,6 +285,7 @@ message Envelope { CheckFileExistsResponse check_file_exists_response = 256; // current max } + reserved 87 to 88; reserved 158 to 161; reserved 166 to 169; reserved 224 to 229; @@ -1238,22 +1237,6 @@ message PerformRenameResponse { ProjectTransaction transaction = 2; } -message SearchProject { - uint64 project_id = 1; - string query = 2; - bool regex = 3; - bool whole_word = 4; - bool case_sensitive = 5; - string files_to_include = 6; - 
string files_to_exclude = 7; - bool include_ignored = 8; -} - -message SearchProjectResponse { - repeated Location locations = 1; - bool limit_reached = 2; -} - message SearchQuery { string query = 2; bool regex = 3; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 799d51defec718..fe1725e0d1c964 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -279,8 +279,6 @@ messages!( (SaveBuffer, Foreground), (SetChannelMemberRole, Foreground), (SetChannelVisibility, Foreground), - (SearchProject, Background), - (SearchProjectResponse, Background), (SendChannelMessage, Background), (SendChannelMessageResponse, Background), (ShareProject, Foreground), @@ -454,7 +452,6 @@ request_messages!( (RespondToChannelInvite, Ack), (RespondToContactRequest, Ack), (SaveBuffer, BufferSaved), - (SearchProject, SearchProjectResponse), (FindSearchCandidates, FindSearchCandidatesResponse), (SendChannelMessage, SendChannelMessageResponse), (SetChannelMemberRole, Ack), @@ -541,7 +538,6 @@ entity_messages!( ResolveCompletionDocumentation, ResolveInlayHint, SaveBuffer, - SearchProject, StartLanguageServer, SynchronizeBuffers, TaskContextForLocation, From e28496d4e2ef581def4854b1e7c4df8cbb542251 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 26 Sep 2024 14:01:05 -0600 Subject: [PATCH 360/762] Stop leaking isahc assumption (#18408) Users of our http_client crate knew they were interacting with isahc as they set its extensions on the request. This change adds our own equivalents for their APIs in preparation for changing the default http client. 
Release Notes: - N/A --- Cargo.lock | 8 -- crates/anthropic/Cargo.toml | 1 - crates/anthropic/src/anthropic.rs | 7 +- crates/copilot/Cargo.toml | 1 - crates/copilot/src/copilot_chat.rs | 7 +- crates/extension/Cargo.toml | 1 - crates/extension/src/extension_store.rs | 2 +- .../src/wasm_host/wit/since_v0_1_0.rs | 13 +-- .../src/wasm_host/wit/since_v0_2_0.rs | 13 +-- crates/feedback/Cargo.toml | 1 - crates/feedback/src/feedback_modal.rs | 3 +- .../src/providers/codeberg.rs | 8 +- .../src/providers/github.rs | 8 +- crates/google_ai/Cargo.toml | 1 - crates/google_ai/src/google_ai.rs | 7 +- crates/gpui/src/app.rs | 3 +- crates/http_client/src/http_client.rs | 90 +++++++++++-------- .../src/isahc_http_client.rs | 26 ++++-- crates/language_model/Cargo.toml | 1 - crates/language_model/src/provider/cloud.rs | 5 +- crates/open_ai/Cargo.toml | 1 - crates/open_ai/src/open_ai.rs | 7 +- crates/zed/Cargo.toml | 1 - crates/zed/src/reliability.rs | 5 +- 24 files changed, 114 insertions(+), 106 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68dad1f74613f1..85a62c9519e012 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -245,7 +245,6 @@ dependencies = [ "chrono", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -2850,7 +2849,6 @@ dependencies = [ "gpui", "http_client", "indoc", - "isahc", "language", "lsp", "menu", @@ -4128,7 +4126,6 @@ dependencies = [ "gpui", "http_client", "indexed_docs", - "isahc", "isahc_http_client", "language", "log", @@ -4289,7 +4286,6 @@ dependencies = [ "gpui", "http_client", "human_bytes", - "isahc", "language", "log", "menu", @@ -5016,7 +5012,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -6288,7 +6283,6 @@ dependencies = [ "http_client", "image", "inline_completion_button", - "isahc", "language", "log", "menu", @@ -7591,7 +7585,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -14435,7 
+14428,6 @@ dependencies = [ "image_viewer", "inline_completion_button", "install_cli", - "isahc", "isahc_http_client", "journal", "language", diff --git a/crates/anthropic/Cargo.toml b/crates/anthropic/Cargo.toml index 9e48ad0e57d81d..ec12932fb74f1e 100644 --- a/crates/anthropic/Cargo.toml +++ b/crates/anthropic/Cargo.toml @@ -20,7 +20,6 @@ anyhow.workspace = true chrono.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index 91b6723e90be97..6b8972284208a1 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -6,9 +6,8 @@ use std::{pin::Pin, str::FromStr}; use anyhow::{anyhow, Context, Result}; use chrono::{DateTime, Utc}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; -use isahc::http::{HeaderMap, HeaderValue}; +use http_client::http::{HeaderMap, HeaderValue}; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use serde::{Deserialize, Serialize}; use strum::{EnumIter, EnumString}; use thiserror::Error; @@ -289,7 +288,7 @@ pub async fn stream_completion_with_rate_limit_info( .header("X-Api-Key", api_key) .header("Content-Type", "application/json"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let serialized_request = serde_json::to_string(&request).context("failed to serialize request")?; diff --git a/crates/copilot/Cargo.toml b/crates/copilot/Cargo.toml index 54abbaa112060b..2a54497562a243 100644 --- a/crates/copilot/Cargo.toml +++ 
b/crates/copilot/Cargo.toml @@ -37,7 +37,6 @@ fs.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true -isahc.workspace = true language.workspace = true lsp.workspace = true menu.workspace = true diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs index 5d80c89a6649dd..c5ba1bfc6a5895 100644 --- a/crates/copilot/src/copilot_chat.rs +++ b/crates/copilot/src/copilot_chat.rs @@ -7,8 +7,7 @@ use chrono::DateTime; use fs::Fs; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt}; use gpui::{AppContext, AsyncAppContext, Global}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use paths::home_dir; use serde::{Deserialize, Serialize}; use settings::watch_config_file; @@ -275,7 +274,7 @@ async fn request_api_token( .header("Accept", "application/json"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let request = request_builder.body(AsyncBody::empty())?; @@ -332,7 +331,7 @@ async fn stream_completion( .header("Copilot-Integration-Id", "vscode-chat"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index edf6184d38475d..6ce1bd6862a1dd 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -28,7 +28,6 @@ futures.workspace = true gpui.workspace = true http_client.workspace = true 
indexed_docs.workspace = true -isahc.workspace = true language.workspace = true log.workspace = true lsp.workspace = true diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 5f9fbffb11b2ec..535d68326f9c3e 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -664,7 +664,7 @@ impl ExtensionStore { let content_length = response .headers() - .get(isahc::http::header::CONTENT_LENGTH) + .get(http_client::http::header::CONTENT_LENGTH) .and_then(|value| value.to_str().ok()?.parse::().ok()); let mut body = BufReader::new(response.body_mut()); diff --git a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs index 3835f58f885290..862e2e7c7f7894 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs @@ -1,5 +1,5 @@ use crate::wasm_host::{wit::ToWasmtimeResult, WasmState}; -use ::http_client::AsyncBody; +use ::http_client::{AsyncBody, HttpRequestExt}; use ::settings::{Settings, WorktreeId}; use anyhow::{anyhow, bail, Context, Result}; use async_compression::futures::bufread::GzipDecoder; @@ -8,7 +8,6 @@ use async_trait::async_trait; use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; -use isahc::config::{Configurable, RedirectPolicy}; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; @@ -297,10 +296,12 @@ fn convert_request( let mut request = ::http_client::Request::builder() .method(::http_client::Method::from(extension_request.method)) .uri(&extension_request.url) - .redirect_policy(match extension_request.redirect_policy { - http_client::RedirectPolicy::NoFollow => RedirectPolicy::None, - http_client::RedirectPolicy::FollowLimit(limit) => RedirectPolicy::Limit(limit), - http_client::RedirectPolicy::FollowAll => 
RedirectPolicy::Follow, + .follow_redirects(match extension_request.redirect_policy { + http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow, + http_client::RedirectPolicy::FollowLimit(limit) => { + ::http_client::RedirectPolicy::FollowLimit(limit) + } + http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll, }); for (key, value) in &extension_request.headers { request = request.header(key, value); diff --git a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs index eb6e1a09a2ae99..e7f5432e1d32cc 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs @@ -1,5 +1,5 @@ use crate::wasm_host::{wit::ToWasmtimeResult, WasmState}; -use ::http_client::AsyncBody; +use ::http_client::{AsyncBody, HttpRequestExt}; use ::settings::{Settings, WorktreeId}; use anyhow::{anyhow, bail, Context, Result}; use async_compression::futures::bufread::GzipDecoder; @@ -8,7 +8,6 @@ use async_trait::async_trait; use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; -use isahc::config::{Configurable, RedirectPolicy}; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; @@ -213,10 +212,12 @@ fn convert_request( let mut request = ::http_client::Request::builder() .method(::http_client::Method::from(extension_request.method)) .uri(&extension_request.url) - .redirect_policy(match extension_request.redirect_policy { - http_client::RedirectPolicy::NoFollow => RedirectPolicy::None, - http_client::RedirectPolicy::FollowLimit(limit) => RedirectPolicy::Limit(limit), - http_client::RedirectPolicy::FollowAll => RedirectPolicy::Follow, + .follow_redirects(match extension_request.redirect_policy { + http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow, + 
http_client::RedirectPolicy::FollowLimit(limit) => { + ::http_client::RedirectPolicy::FollowLimit(limit) + } + http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll, }); for (key, value) in &extension_request.headers { request = request.header(key, value); diff --git a/crates/feedback/Cargo.toml b/crates/feedback/Cargo.toml index 83c726e3e9ab4a..0447858ca53b4f 100644 --- a/crates/feedback/Cargo.toml +++ b/crates/feedback/Cargo.toml @@ -23,7 +23,6 @@ editor.workspace = true futures.workspace = true gpui.workspace = true human_bytes = "0.4.1" -isahc.workspace = true http_client.workspace = true language.workspace = true log.workspace = true diff --git a/crates/feedback/src/feedback_modal.rs b/crates/feedback/src/feedback_modal.rs index 4762b228d3e44f..5270492aee5c33 100644 --- a/crates/feedback/src/feedback_modal.rs +++ b/crates/feedback/src/feedback_modal.rs @@ -11,7 +11,6 @@ use gpui::{ PromptLevel, Render, Task, View, ViewContext, }; use http_client::HttpClient; -use isahc::Request; use language::Buffer; use project::Project; use regex::Regex; @@ -299,7 +298,7 @@ impl FeedbackModal { is_staff: is_staff.unwrap_or(false), }; let json_bytes = serde_json::to_vec(&request)?; - let request = Request::post(feedback_endpoint) + let request = http_client::http::Request::post(feedback_endpoint) .header("content-type", "application/json") .body(json_bytes.into())?; let mut response = http_client.send(request).await?; diff --git a/crates/git_hosting_providers/src/providers/codeberg.rs b/crates/git_hosting_providers/src/providers/codeberg.rs index eaadca1ecf9618..3f6a016f68fd4a 100644 --- a/crates/git_hosting_providers/src/providers/codeberg.rs +++ b/crates/git_hosting_providers/src/providers/codeberg.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::{AsyncBody, HttpClient, Request}; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, 
Request}; use serde::Deserialize; use url::Url; @@ -49,14 +49,16 @@ impl Codeberg { let url = format!("https://codeberg.org/api/v1/repos/{repo_owner}/{repo}/git/commits/{commit}"); - let mut request = Request::get(&url).header("Content-Type", "application/json"); + let mut request = Request::get(&url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); if let Ok(codeberg_token) = std::env::var("CODEBERG_TOKEN") { request = request.header("Authorization", format!("Bearer {}", codeberg_token)); } let mut response = client - .send_with_redirect_policy(request.body(AsyncBody::default())?, true) + .send(request.body(AsyncBody::default())?) .await .with_context(|| format!("error fetching Codeberg commit details at {:?}", url))?; diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 77eaa80961e61c..4078025fa004fc 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -3,7 +3,7 @@ use std::sync::{Arc, OnceLock}; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::{AsyncBody, HttpClient, Request}; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request}; use regex::Regex; use serde::Deserialize; use url::Url; @@ -53,14 +53,16 @@ impl Github { ) -> Result> { let url = format!("https://api.github.com/repos/{repo_owner}/{repo}/commits/{commit}"); - let mut request = Request::get(&url).header("Content-Type", "application/json"); + let mut request = Request::get(&url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); if let Ok(github_token) = std::env::var("GITHUB_TOKEN") { request = request.header("Authorization", format!("Bearer {}", github_token)); } let mut response = client - .send_with_redirect_policy(request.body(AsyncBody::default())?, true) + 
.send(request.body(AsyncBody::default())?) .await .with_context(|| format!("error fetching GitHub commit details at {:?}", url))?; diff --git a/crates/google_ai/Cargo.toml b/crates/google_ai/Cargo.toml index 2a52f1968dcb6b..f923e0ec917426 100644 --- a/crates/google_ai/Cargo.toml +++ b/crates/google_ai/Cargo.toml @@ -18,7 +18,6 @@ schemars = ["dep:schemars"] anyhow.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index f1dcedf5b31e0c..7991c67956bb85 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -2,8 +2,7 @@ mod supported_countries; use anyhow::{anyhow, Result}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use serde::{Deserialize, Serialize}; use std::time::Duration; @@ -30,7 +29,7 @@ pub async fn stream_generate_content( .header("Content-Type", "application/json"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); }; let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; @@ -85,7 +84,7 @@ pub async fn count_tokens( .header("Content-Type", "application/json"); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); } let http_request = request_builder.body(AsyncBody::from(request))?; diff --git a/crates/gpui/src/app.rs 
b/crates/gpui/src/app.rs index 6cb491b100810b..540e459ce1a314 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1524,10 +1524,9 @@ pub struct KeystrokeEvent { struct NullHttpClient; impl HttpClient for NullHttpClient { - fn send_with_redirect_policy( + fn send( &self, _req: http_client::Request, - _follow_redirects: bool, ) -> futures::future::BoxFuture< 'static, Result, anyhow::Error>, diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index c0630151519c5a..2f029a1d236bba 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -10,22 +10,46 @@ use futures::future::BoxFuture; use http::request::Builder; #[cfg(feature = "test-support")] use std::fmt; -use std::sync::{Arc, Mutex}; +use std::{ + sync::{Arc, Mutex}, + time::Duration, +}; pub use url::Url; +pub struct ReadTimeout(pub Duration); +#[derive(Default, Debug, Clone)] +pub enum RedirectPolicy { + #[default] + NoFollow, + FollowLimit(u32), + FollowAll, +} +pub struct FollowRedirects(pub bool); + +pub trait HttpRequestExt { + /// Set a read timeout on the request. + /// For isahc, this is the low_speed_timeout. + /// For other clients, this is the timeout used for read calls when reading the response. + /// In all cases this prevents servers stalling completely, but allows them to send data slowly. 
+ fn read_timeout(self, timeout: Duration) -> Self; + /// Whether or not to follow redirects + fn follow_redirects(self, follow: RedirectPolicy) -> Self; +} + +impl HttpRequestExt for http::request::Builder { + fn read_timeout(self, timeout: Duration) -> Self { + self.extension(ReadTimeout(timeout)) + } + + fn follow_redirects(self, follow: RedirectPolicy) -> Self { + self.extension(follow) + } +} + pub trait HttpClient: 'static + Send + Sync { fn send( &self, req: http::Request, - ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.send_with_redirect_policy(req, false) - } - - // TODO: Make a better API for this - fn send_with_redirect_policy( - &self, - req: Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>>; fn get<'a>( @@ -34,14 +58,17 @@ pub trait HttpClient: 'static + Send + Sync { body: AsyncBody, follow_redirects: bool, ) -> BoxFuture<'a, Result, anyhow::Error>> { - let request = Builder::new().uri(uri).body(body); + let request = Builder::new() + .uri(uri) + .follow_redirects(if follow_redirects { + RedirectPolicy::FollowAll + } else { + RedirectPolicy::NoFollow + }) + .body(body); match request { - Ok(request) => Box::pin(async move { - self.send_with_redirect_policy(request, follow_redirects) - .await - .map_err(Into::into) - }), + Ok(request) => Box::pin(async move { self.send(request).await.map_err(Into::into) }), Err(e) => Box::pin(async move { Err(e.into()) }), } } @@ -92,12 +119,11 @@ impl HttpClientWithProxy { } impl HttpClient for HttpClientWithProxy { - fn send_with_redirect_policy( + fn send( &self, req: Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.client.send_with_redirect_policy(req, follow_redirects) + self.client.send(req) } fn proxy(&self) -> Option<&Uri> { @@ -106,12 +132,11 @@ impl HttpClient for HttpClientWithProxy { } impl HttpClient for Arc { - fn send_with_redirect_policy( + fn send( &self, req: Request, - follow_redirects: bool, ) -> 
BoxFuture<'static, Result, anyhow::Error>> { - self.client.send_with_redirect_policy(req, follow_redirects) + self.client.send(req) } fn proxy(&self) -> Option<&Uri> { @@ -218,12 +243,11 @@ impl HttpClientWithUrl { } impl HttpClient for Arc { - fn send_with_redirect_policy( + fn send( &self, req: Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.client.send_with_redirect_policy(req, follow_redirects) + self.client.send(req) } fn proxy(&self) -> Option<&Uri> { @@ -232,12 +256,11 @@ impl HttpClient for Arc { } impl HttpClient for HttpClientWithUrl { - fn send_with_redirect_policy( + fn send( &self, req: Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.client.send_with_redirect_policy(req, follow_redirects) + self.client.send(req) } fn proxy(&self) -> Option<&Uri> { @@ -283,14 +306,6 @@ impl HttpClient for BlockedHttpClient { fn proxy(&self) -> Option<&Uri> { None } - - fn send_with_redirect_policy( - &self, - req: Request, - _: bool, - ) -> BoxFuture<'static, Result, anyhow::Error>> { - self.send(req) - } } #[cfg(feature = "test-support")] @@ -352,10 +367,9 @@ impl fmt::Debug for FakeHttpClient { #[cfg(feature = "test-support")] impl HttpClient for FakeHttpClient { - fn send_with_redirect_policy( + fn send( &self, req: Request, - _follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { let future = (self.handler)(req); future diff --git a/crates/isahc_http_client/src/isahc_http_client.rs b/crates/isahc_http_client/src/isahc_http_client.rs index 6c40b9f53b3f8e..778f6a04598909 100644 --- a/crates/isahc_http_client/src/isahc_http_client.rs +++ b/crates/isahc_http_client/src/isahc_http_client.rs @@ -1,7 +1,6 @@ use std::{mem, sync::Arc, time::Duration}; use futures::future::BoxFuture; -use isahc::config::RedirectPolicy; use util::maybe; pub use isahc::config::Configurable; @@ -36,18 +35,29 @@ impl HttpClient for IsahcHttpClient { None } - fn send_with_redirect_policy( + 
fn send( &self, req: http_client::http::Request, - follow_redirects: bool, ) -> BoxFuture<'static, Result, anyhow::Error>> { + let redirect_policy = req + .extensions() + .get::() + .cloned() + .unwrap_or_default(); + let read_timeout = req + .extensions() + .get::() + .map(|t| t.0); let req = maybe!({ let (mut parts, body) = req.into_parts(); let mut builder = isahc::Request::builder() .method(parts.method) .uri(parts.uri) .version(parts.version); + if let Some(read_timeout) = read_timeout { + builder = builder.low_speed_timeout(100, read_timeout); + } let headers = builder.headers_mut()?; mem::swap(headers, &mut parts.headers); @@ -64,10 +74,12 @@ impl HttpClient for IsahcHttpClient { }; builder - .redirect_policy(if follow_redirects { - RedirectPolicy::Follow - } else { - RedirectPolicy::None + .redirect_policy(match redirect_policy { + http_client::RedirectPolicy::FollowAll => isahc::config::RedirectPolicy::Follow, + http_client::RedirectPolicy::FollowLimit(limit) => { + isahc::config::RedirectPolicy::Limit(limit) + } + http_client::RedirectPolicy::NoFollow => isahc::config::RedirectPolicy::None, }) .body(isahc_body) .ok() diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index b63428c544369b..ef273ac44fca39 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -32,7 +32,6 @@ futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true http_client.workspace = true -isahc.workspace = true inline_completion_button.workspace = true log.workspace = true menu.workspace = true diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 606a6fbacec7b0..3c407b77d929de 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -18,8 +18,7 @@ use gpui::{ AnyElement, AnyView, AppContext, AsyncAppContext, FontWeight, Model, ModelContext, Subscription, Task, }; -use 
http_client::{AsyncBody, HttpClient, Method, Response}; -use isahc::config::Configurable; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Response}; use schemars::JsonSchema; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde_json::value::RawValue; @@ -396,7 +395,7 @@ impl CloudLanguageModel { let response = loop { let mut request_builder = http_client::Request::builder(); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); }; let request = request_builder .method(Method::POST) diff --git a/crates/open_ai/Cargo.toml b/crates/open_ai/Cargo.toml index db9c77bac6bfe4..4f729598f82cdb 100644 --- a/crates/open_ai/Cargo.toml +++ b/crates/open_ai/Cargo.toml @@ -19,7 +19,6 @@ schemars = ["dep:schemars"] anyhow.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index e67fe1af27cdb8..6a24eec69610c7 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -6,8 +6,7 @@ use futures::{ stream::{self, BoxStream}, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt, }; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest}; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::{ @@ -318,7 +317,7 @@ pub async fn complete( .header("Content-Type", "application/json") .header("Authorization", format!("Bearer {}", api_key)); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = 
request_builder.read_timeout(low_speed_timeout); }; let mut request_body = request; @@ -413,7 +412,7 @@ pub async fn stream_completion( .header("Authorization", format!("Bearer {}", api_key)); if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + request_builder = request_builder.read_timeout(low_speed_timeout); }; let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 897e0e9a28bca8..5422f8b29aa98d 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -57,7 +57,6 @@ http_client.workspace = true image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true -isahc.workspace = true isahc_http_client.workspace = true journal.workspace = true language.workspace = true diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 9e811d7c9afbb4..50e5a05b823ed0 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -4,8 +4,7 @@ use chrono::Utc; use client::telemetry; use db::kvp::KEY_VALUE_STORE; use gpui::{AppContext, SemanticVersion}; -use http_client::Method; -use isahc::config::Configurable; +use http_client::{HttpRequestExt, Method}; use http_client::{self, HttpClient, HttpClientWithUrl}; use paths::{crashes_dir, crashes_retired_dir}; @@ -491,7 +490,7 @@ async fn upload_previous_crashes( .context("error reading crash file")?; let mut request = http_client::Request::post(&crash_report_url.to_string()) - .redirect_policy(isahc::config::RedirectPolicy::Follow) + .follow_redirects(http_client::RedirectPolicy::FollowAll) .header("Content-Type", "text/plain"); if let Some((panicked_on, payload)) = most_recent_panic.as_ref() { From 48c6eb9ac7a0cebc096cf576c163d054de41c92f Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 26 Sep 2024 16:21:20 -0400 Subject: [PATCH 361/762] Add script to generate 
license dependencies as csv (#18411) Co-authored-by: Joseph T. Lyons --- .gitignore | 2 +- script/generate-licenses | 6 ++++- script/generate-licenses-csv | 23 +++++++++++++++++++ script/licenses/template.csv.hbs | 6 +++++ .../{template.hbs.md => template.md.hbs} | 0 5 files changed, 35 insertions(+), 2 deletions(-) create mode 100755 script/generate-licenses-csv create mode 100644 script/licenses/template.csv.hbs rename script/licenses/{template.hbs.md => template.md.hbs} (100%) diff --git a/.gitignore b/.gitignore index 634b73ac943cc3..d19c5a102aac8a 100644 --- a/.gitignore +++ b/.gitignore @@ -10,7 +10,7 @@ /crates/collab/seed.json /crates/zed/resources/flatpak/flatpak-cargo-sources.json /dev.zed.Zed*.json -/assets/*licenses.md +/assets/*licenses.* **/venv .build *.wasm diff --git a/script/generate-licenses b/script/generate-licenses index 43b2f5c4588af4..9602813f0ced27 100755 --- a/script/generate-licenses +++ b/script/generate-licenses @@ -4,6 +4,7 @@ set -euo pipefail CARGO_ABOUT_VERSION="0.6.1" OUTPUT_FILE="${1:-$(pwd)/assets/licenses.md}" +TEMPLATE_FILE="script/licenses/template.md.hbs" > $OUTPUT_FILE @@ -23,7 +24,10 @@ else fi echo "Generating cargo licenses" -cargo about generate --fail -c script/licenses/zed-licenses.toml script/licenses/template.hbs.md >> $OUTPUT_FILE +cargo about generate \ + --fail \ + -c script/licenses/zed-licenses.toml \ + "${TEMPLATE_FILE}" >> $OUTPUT_FILE sed -i.bak 's/"/"/g' $OUTPUT_FILE diff --git a/script/generate-licenses-csv b/script/generate-licenses-csv new file mode 100755 index 00000000000000..63bfee83a5cbb1 --- /dev/null +++ b/script/generate-licenses-csv @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +set -euo pipefail + +CARGO_ABOUT_VERSION="0.6.1" +OUTPUT_FILE="${1:-$(pwd)/assets/licenses.csv}" +TEMPLATE_FILE="script/licenses/template.csv.hbs" + +if ! cargo install --list | grep "cargo-about v$CARGO_ABOUT_VERSION" > /dev/null; then + echo "Installing cargo-about@$CARGO_ABOUT_VERSION..." 
+ cargo install "cargo-about@$CARGO_ABOUT_VERSION" +else + echo "cargo-about@$CARGO_ABOUT_VERSION is already installed." +fi + +echo "Generating cargo licenses" +cargo about generate \ + --fail \ + -c script/licenses/zed-licenses.toml \ + script/licenses/template.csv.hbs \ + | awk 'NR==1{print;next} NF{print | "sort"}' \ + > $OUTPUT_FILE +echo "Completed. See $OUTPUT_FILE" diff --git a/script/licenses/template.csv.hbs b/script/licenses/template.csv.hbs new file mode 100644 index 00000000000000..1459aa648d8da8 --- /dev/null +++ b/script/licenses/template.csv.hbs @@ -0,0 +1,6 @@ +Crate Name,Crate Version,License,Url +{{#each licenses}} +{{#each used_by}} +{{crate.name}},{{crate.version}},{{../name}},{{#if crate.repository}}{{crate.repository}}{{else}}https://crates.io/crates/{{crate.name}}{{/if}} +{{/each}} +{{/each}} diff --git a/script/licenses/template.hbs.md b/script/licenses/template.md.hbs similarity index 100% rename from script/licenses/template.hbs.md rename to script/licenses/template.md.hbs From c83d007138587d832c5ce3a13c8cb99913c016be Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 26 Sep 2024 23:43:58 +0300 Subject: [PATCH 362/762] Remove non-wrapping settings from the language configuration (#18412) Closes https://github.com/zed-industries/zed/issues/17736 Those are limited with 1024 symbols before wrapping still, and were introduced for git diff deleted hunks display. 
Instead of confusing people with actually wrapping, restores behavior that was before https://github.com/zed-industries/zed/pull/11080 Release Notes: - Removed confusing soft wrap option behavior ([#17736]https://github.com/zed-industries/zed/issues/17736) --- assets/settings/default.json | 7 +++---- crates/editor/src/editor.rs | 24 +++++++++++++++++------- crates/editor/src/element.rs | 6 ++---- crates/language/src/language_settings.rs | 9 +++++---- docs/src/configuring-zed.md | 6 +++--- 5 files changed, 30 insertions(+), 22 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index cf0de6a5e7f9aa..b3be17ad2cedcf 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -535,17 +535,16 @@ // How to soft-wrap long lines of text. // Possible values: // - // 1. Do not soft wrap. - // "soft_wrap": "none", // 2. Prefer a single line generally, unless an overly long line is encountered. - // "soft_wrap": "prefer_line", + // "soft_wrap": "none", + // "soft_wrap": "prefer_line", // (deprecated, same as "none") // 3. Soft wrap lines that overflow the editor. // "soft_wrap": "editor_width", // 4. Soft wrap lines at the preferred line length. // "soft_wrap": "preferred_line_length", // 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller). // "soft_wrap": "bounded", - "soft_wrap": "prefer_line", + "soft_wrap": "none", // The column at which to soft-wrap lines, for buffers where soft-wrap // is enabled. "preferred_line_length": 80, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 54d23a8219d4f4..b7f825df9eec60 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -376,12 +376,20 @@ pub enum EditorMode { Full, } -#[derive(Clone, Debug)] +#[derive(Copy, Clone, Debug)] pub enum SoftWrap { + /// Prefer not to wrap at all. + /// + /// Note: this is currently internal, as actually limited by [`crate::MAX_LINE_LEN`] until it wraps. 
+ /// The mode is used inside git diff hunks, where it's seems currently more useful to not wrap as much as possible. + GitDiff, + /// Prefer a single line generally, unless an overly long line is encountered. None, - PreferLine, + /// Soft wrap lines that exceed the editor width. EditorWidth, + /// Soft wrap lines at the preferred line length. Column(u32), + /// Soft wrap line at the preferred line length or the editor width (whichever is smaller). Bounded(u32), } @@ -1837,7 +1845,7 @@ impl Editor { let blink_manager = cx.new_model(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx)); let soft_wrap_mode_override = matches!(mode, EditorMode::SingleLine { .. }) - .then(|| language_settings::SoftWrap::PreferLine); + .then(|| language_settings::SoftWrap::None); let mut project_subscriptions = Vec::new(); if mode == EditorMode::Full { @@ -10898,8 +10906,9 @@ impl Editor { let settings = self.buffer.read(cx).settings_at(0, cx); let mode = self.soft_wrap_mode_override.unwrap_or(settings.soft_wrap); match mode { - language_settings::SoftWrap::None => SoftWrap::None, - language_settings::SoftWrap::PreferLine => SoftWrap::PreferLine, + language_settings::SoftWrap::PreferLine | language_settings::SoftWrap::None => { + SoftWrap::None + } language_settings::SoftWrap::EditorWidth => SoftWrap::EditorWidth, language_settings::SoftWrap::PreferredLineLength => { SoftWrap::Column(settings.preferred_line_length) @@ -10947,9 +10956,10 @@ impl Editor { self.soft_wrap_mode_override.take(); } else { let soft_wrap = match self.soft_wrap_mode(cx) { - SoftWrap::None | SoftWrap::PreferLine => language_settings::SoftWrap::EditorWidth, + SoftWrap::GitDiff => return, + SoftWrap::None => language_settings::SoftWrap::EditorWidth, SoftWrap::EditorWidth | SoftWrap::Column(_) | SoftWrap::Bounded(_) => { - language_settings::SoftWrap::PreferLine + language_settings::SoftWrap::None } }; self.soft_wrap_mode_override = Some(soft_wrap); diff --git a/crates/editor/src/element.rs 
b/crates/editor/src/element.rs index 6f30062d47ec77..bad16b225f3298 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -4994,10 +4994,8 @@ impl Element for EditorElement { snapshot } else { let wrap_width = match editor.soft_wrap_mode(cx) { - SoftWrap::None => None, - SoftWrap::PreferLine => { - Some((MAX_LINE_LEN / 2) as f32 * em_advance) - } + SoftWrap::GitDiff => None, + SoftWrap::None => Some((MAX_LINE_LEN / 2) as f32 * em_advance), SoftWrap::EditorWidth => Some(editor_width), SoftWrap::Column(column) => Some(column as f32 * em_advance), SoftWrap::Bounded(column) => { diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index f830c5f25c308c..2f1a7be2bf492d 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -379,15 +379,16 @@ pub struct FeaturesContent { #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum SoftWrap { - /// Do not soft wrap. + /// Prefer a single line generally, unless an overly long line is encountered. None, + /// Deprecated: use None instead. Left to avoid breakin existing users' configs. /// Prefer a single line generally, unless an overly long line is encountered. PreferLine, - /// Soft wrap lines that exceed the editor width + /// Soft wrap lines that exceed the editor width. EditorWidth, - /// Soft wrap lines at the preferred line length + /// Soft wrap lines at the preferred line length. PreferredLineLength, - /// Soft wrap line at the preferred line length or the editor width (whichever is smaller) + /// Soft wrap line at the preferred line length or the editor width (whichever is smaller). 
Bounded, } diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 7837044a60a669..18d66708ad7fc8 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1357,12 +1357,12 @@ Or to set a `socks5` proxy: - Description: Whether or not to automatically wrap lines of text to fit editor / preferred width. - Setting: `soft_wrap` -- Default: `prefer_line` +- Default: `none` **Options** -1. `none` to stop the soft-wrapping -2. `prefer_line` to avoid wrapping generally, unless the line is too long +1. `none` to avoid wrapping generally, unless the line is too long +2. `prefer_line` (deprecated, same as `none`) 3. `editor_width` to wrap lines that overflow the editor width 4. `preferred_line_length` to wrap lines that overflow `preferred_line_length` config value From 32605e9ea4d9c1de2b23ac084d30df5c36b5f4dc Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 26 Sep 2024 20:27:49 -0600 Subject: [PATCH 363/762] Fix register selection in visual mode (#18418) Related to #12895 Release Notes: - vim: Fix register selection in visual yank --- crates/vim/src/normal/mark.rs | 1 - crates/vim/src/normal/paste.rs | 3 +++ crates/vim/test_data/test_special_registers.json | 7 +++++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/vim/src/normal/mark.rs b/crates/vim/src/normal/mark.rs index 787430e747e3f2..743ab59ee286cc 100644 --- a/crates/vim/src/normal/mark.rs +++ b/crates/vim/src/normal/mark.rs @@ -63,7 +63,6 @@ impl Vim { self.marks.insert("<".to_string(), starts); self.marks.insert(">".to_string(), ends); self.stored_visual_mode.replace((mode, reversed)); - self.clear_operator(cx); } pub fn jump(&mut self, text: Arc, line: bool, cx: &mut ViewContext) { diff --git a/crates/vim/src/normal/paste.rs b/crates/vim/src/normal/paste.rs index 05469dbf9f168f..5322f913c1769b 100644 --- a/crates/vim/src/normal/paste.rs +++ b/crates/vim/src/normal/paste.rs @@ -673,6 +673,9 @@ mod test { cx.simulate_shared_keystrokes("\" _ d 
d").await; cx.shared_register('_').await.assert_eq(""); + cx.simulate_shared_keystrokes("shift-v \" _ y w").await; + cx.shared_register('"').await.assert_eq("jumps"); + cx.shared_state().await.assert_eq(indoc! {" The quick brown the ˇlazy dog"}); diff --git a/crates/vim/test_data/test_special_registers.json b/crates/vim/test_data/test_special_registers.json index 8b6b098af659fd..35f181a05c4f64 100644 --- a/crates/vim/test_data/test_special_registers.json +++ b/crates/vim/test_data/test_special_registers.json @@ -10,6 +10,13 @@ {"Key":"d"} {"Get":{"state":"The quick brown\nthe ˇlazy dog","mode":"Normal"}} {"ReadRegister":{"name":"_","value":""}} +{"Key":"shift-v"} +{"Key":"\""} +{"Key":"_"} +{"Key":"y"} +{"Key":"w"} +{"Get":{"state":"The quick brown\nthe ˇlazy dog","mode":"Normal"}} +{"ReadRegister":{"name":"\"","value":"jumps"}} {"Get":{"state":"The quick brown\nthe ˇlazy dog","mode":"Normal"}} {"Key":"\""} {"Key":"\""} From 1be3c44550012c9c7657de0450044c973f8c2d10 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 26 Sep 2024 23:52:07 -0600 Subject: [PATCH 364/762] vim: Support za (#18421) Closes #6822 Updates #5142 Release Notes: - Added new fold actions to toggle folds (`cmd-k cmd-l`), fold every fold (`cmd-k cmd-0`) unfold every fold (`cmd-k cmd-j`) to fold recursively (`cmd-k cmd-[`) and unfold recursively (`cmd-k cmd-]`). - vim: Added `za` to toggle fold under cursor. - vim: Added `zO`/`zC`/`zA` to open, close and toggle folds recursively (and fixed `zc` to not recurse into selections). - vim: Added `zR`/`zM` to open/close all folds in the buffer. 
--- assets/keymaps/default-linux.json | 5 + assets/keymaps/default-macos.json | 5 + assets/keymaps/vim.json | 6 ++ crates/editor/src/actions.rs | 6 ++ crates/editor/src/editor.rs | 151 +++++++++++++++++++++++++++++- crates/editor/src/editor_tests.rs | 6 +- crates/editor/src/element.rs | 6 ++ 7 files changed, 179 insertions(+), 6 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index f15c4dfe22b6c0..8d4871d95648db 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -310,6 +310,11 @@ "ctrl-shift-\\": "editor::MoveToEnclosingBracket", "ctrl-shift-[": "editor::Fold", "ctrl-shift-]": "editor::UnfoldLines", + "ctrl-k ctrl-l": "editor::ToggleFold", + "ctrl-k ctrl-[": "editor::FoldRecursive", + "ctrl-k ctrl-]": "editor::UnfoldRecursive", + "ctrl-k ctrl-0": "editor::FoldAll", + "ctrl-k ctrl-j": "editor::UnfoldAll", "ctrl-space": "editor::ShowCompletions", "ctrl-.": "editor::ToggleCodeActions", "alt-ctrl-r": "editor::RevealInFileManager", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index a58112b3c0b927..a980ae14e22a95 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -347,6 +347,11 @@ "cmd-shift-\\": "editor::MoveToEnclosingBracket", "alt-cmd-[": "editor::Fold", "alt-cmd-]": "editor::UnfoldLines", + "cmd-k cmd-l": "editor::ToggleFold", + "cmd-k cmd-[": "editor::FoldRecursive", + "cmd-k cmd-]": "editor::UnfoldRecursive", + "cmd-k cmd-0": "editor::FoldAll", + "cmd-k cmd-j": "editor::UnfoldAll", "ctrl-space": "editor::ShowCompletions", "cmd-.": "editor::ToggleCodeActions", "alt-cmd-r": "editor::RevealInFileManager", diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 6656ea0ddf22c3..f3a088f11e5d27 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -132,9 +132,15 @@ "z z": "editor::ScrollCursorCenter", "z .": ["workspace::SendKeystrokes", "z z ^"], "z b": 
"editor::ScrollCursorBottom", + "z a": "editor::ToggleFold", + "z A": "editor::ToggleFoldRecursive", "z c": "editor::Fold", + "z C": "editor::FoldRecursive", "z o": "editor::UnfoldLines", + "z O": "editor::UnfoldRecursive", "z f": "editor::FoldSelectedRanges", + "z M": "editor::FoldAll", + "z R": "editor::UnfoldAll", "shift-z shift-q": ["pane::CloseActiveItem", { "saveIntent": "skip" }], "shift-z shift-z": ["pane::CloseActiveItem", { "saveIntent": "saveAll" }], // Count support diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 2383c7f71af8a9..b5935782580ba3 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -230,7 +230,11 @@ gpui::actions!( ExpandMacroRecursively, FindAllReferences, Fold, + FoldAll, + FoldRecursive, FoldSelectedRanges, + ToggleFold, + ToggleFoldRecursive, Format, GoToDeclaration, GoToDeclarationSplit, @@ -340,7 +344,9 @@ gpui::actions!( Transpose, Undo, UndoSelection, + UnfoldAll, UnfoldLines, + UnfoldRecursive, UniqueLinesCaseInsensitive, UniqueLinesCaseSensitive, ] diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b7f825df9eec60..44de6014ec8137 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10551,17 +10551,79 @@ impl Editor { } } - pub fn fold(&mut self, _: &actions::Fold, cx: &mut ViewContext) { - let mut fold_ranges = Vec::new(); + pub fn toggle_fold(&mut self, _: &actions::ToggleFold, cx: &mut ViewContext) { + let selection = self.selections.newest::(cx); + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let range = if selection.is_empty() { + let point = selection.head().to_display_point(&display_map); + let start = DisplayPoint::new(point.row(), 0).to_point(&display_map); + let end = DisplayPoint::new(point.row(), display_map.line_len(point.row())) + .to_point(&display_map); + start..end + } else { + selection.range() + }; + if display_map.folds_in_range(range).next().is_some() { + 
self.unfold_lines(&Default::default(), cx) + } else { + self.fold(&Default::default(), cx) + } + } + + pub fn toggle_fold_recursive( + &mut self, + _: &actions::ToggleFoldRecursive, + cx: &mut ViewContext, + ) { + let selection = self.selections.newest::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let range = if selection.is_empty() { + let point = selection.head().to_display_point(&display_map); + let start = DisplayPoint::new(point.row(), 0).to_point(&display_map); + let end = DisplayPoint::new(point.row(), display_map.line_len(point.row())) + .to_point(&display_map); + start..end + } else { + selection.range() + }; + if display_map.folds_in_range(range).next().is_some() { + self.unfold_recursive(&Default::default(), cx) + } else { + self.fold_recursive(&Default::default(), cx) + } + } + pub fn fold(&mut self, _: &actions::Fold, cx: &mut ViewContext) { + let mut fold_ranges = Vec::new(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let selections = self.selections.all_adjusted(cx); + for selection in selections { let range = selection.range().sorted(); let buffer_start_row = range.start.row; - for row in (0..=range.end.row).rev() { + if range.start.row != range.end.row { + let mut found = false; + let mut row = range.start.row; + while row <= range.end.row { + if let Some((foldable_range, fold_text)) = + { display_map.foldable_range(MultiBufferRow(row)) } + { + found = true; + row = foldable_range.end.row + 1; + fold_ranges.push((foldable_range, fold_text)); + } else { + row += 1 + } + } + if found { + continue; + } + } + + for row in (0..=range.start.row).rev() { if let Some((foldable_range, fold_text)) = display_map.foldable_range(MultiBufferRow(row)) { @@ -10578,6 +10640,61 @@ impl Editor { self.fold_ranges(fold_ranges, true, cx); } + pub fn fold_all(&mut self, _: &actions::FoldAll, cx: &mut ViewContext) { + let mut fold_ranges = Vec::new(); + let display_map = self.display_map.update(cx, 
|map, cx| map.snapshot(cx)); + + for row in 0..display_map.max_buffer_row().0 { + if let Some((foldable_range, fold_text)) = + display_map.foldable_range(MultiBufferRow(row)) + { + fold_ranges.push((foldable_range, fold_text)); + } + } + + self.fold_ranges(fold_ranges, true, cx); + } + + pub fn fold_recursive(&mut self, _: &actions::FoldRecursive, cx: &mut ViewContext) { + let mut fold_ranges = Vec::new(); + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all_adjusted(cx); + + for selection in selections { + let range = selection.range().sorted(); + let buffer_start_row = range.start.row; + + if range.start.row != range.end.row { + let mut found = false; + for row in range.start.row..=range.end.row { + if let Some((foldable_range, fold_text)) = + { display_map.foldable_range(MultiBufferRow(row)) } + { + found = true; + fold_ranges.push((foldable_range, fold_text)); + } + } + if found { + continue; + } + } + + for row in (0..=range.start.row).rev() { + if let Some((foldable_range, fold_text)) = + display_map.foldable_range(MultiBufferRow(row)) + { + if foldable_range.end.row >= buffer_start_row { + fold_ranges.push((foldable_range, fold_text)); + } else { + break; + } + } + } + } + + self.fold_ranges(fold_ranges, true, cx); + } + pub fn fold_at(&mut self, fold_at: &FoldAt, cx: &mut ViewContext) { let buffer_row = fold_at.buffer_row; let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -10612,6 +10729,24 @@ impl Editor { self.unfold_ranges(ranges, true, true, cx); } + pub fn unfold_recursive(&mut self, _: &UnfoldRecursive, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let selections = self.selections.all::(cx); + let ranges = selections + .iter() + .map(|s| { + let mut range = s.display_range(&display_map).sorted(); + *range.start.column_mut() = 0; + *range.end.column_mut() = display_map.line_len(range.end.row()); + let start 
= range.start.to_point(&display_map); + let end = range.end.to_point(&display_map); + start..end + }) + .collect::>(); + + self.unfold_ranges(ranges, true, true, cx); + } + pub fn unfold_at(&mut self, unfold_at: &UnfoldAt, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -10630,6 +10765,16 @@ impl Editor { self.unfold_ranges(std::iter::once(intersection_range), true, autoscroll, cx) } + pub fn unfold_all(&mut self, _: &actions::UnfoldAll, cx: &mut ViewContext) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + self.unfold_ranges( + [Point::zero()..display_map.max_point().to_point(&display_map)], + true, + true, + cx, + ); + } + pub fn fold_selected_ranges(&mut self, _: &FoldSelectedRanges, cx: &mut ViewContext) { let selections = self.selections.all::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index de1b12abe00778..31a69918026f72 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -852,7 +852,7 @@ fn test_fold_action(cx: &mut TestAppContext) { _ = view.update(cx, |view, cx| { view.change_selections(None, cx, |s| { s.select_display_ranges([ - DisplayPoint::new(DisplayRow(8), 0)..DisplayPoint::new(DisplayRow(12), 0) + DisplayPoint::new(DisplayRow(7), 0)..DisplayPoint::new(DisplayRow(12), 0) ]); }); view.fold(&Fold, cx); @@ -940,7 +940,7 @@ fn test_fold_action_whitespace_sensitive_language(cx: &mut TestAppContext) { _ = view.update(cx, |view, cx| { view.change_selections(None, cx, |s| { s.select_display_ranges([ - DisplayPoint::new(DisplayRow(7), 0)..DisplayPoint::new(DisplayRow(10), 0) + DisplayPoint::new(DisplayRow(6), 0)..DisplayPoint::new(DisplayRow(10), 0) ]); }); view.fold(&Fold, cx); @@ -1022,7 +1022,7 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) { _ = view.update(cx, |view, cx| { 
view.change_selections(None, cx, |s| { s.select_display_ranges([ - DisplayPoint::new(DisplayRow(7), 0)..DisplayPoint::new(DisplayRow(11), 0) + DisplayPoint::new(DisplayRow(6), 0)..DisplayPoint::new(DisplayRow(11), 0) ]); }); view.fold(&Fold, cx); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index bad16b225f3298..e5c067e37ec3dc 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -335,8 +335,14 @@ impl EditorElement { register_action(view, cx, Editor::open_url); register_action(view, cx, Editor::open_file); register_action(view, cx, Editor::fold); + register_action(view, cx, Editor::fold_all); register_action(view, cx, Editor::fold_at); + register_action(view, cx, Editor::fold_recursive); + register_action(view, cx, Editor::toggle_fold); + register_action(view, cx, Editor::toggle_fold_recursive); register_action(view, cx, Editor::unfold_lines); + register_action(view, cx, Editor::unfold_recursive); + register_action(view, cx, Editor::unfold_all); register_action(view, cx, Editor::unfold_at); register_action(view, cx, Editor::fold_selected_ranges); register_action(view, cx, Editor::show_completions); From 02d05615866a35da38e31630ac41c61aaf64a59d Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Sep 2024 00:36:17 -0600 Subject: [PATCH 365/762] Fix read timeout for ollama (#18417) Supercedes: #18310 Release Notes: - Fixed `low_speed_timeout_in_seconds` for Ollama --- crates/ollama/src/ollama.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index 84404afce13b10..a38b9e7a564512 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -1,6 +1,6 @@ use anyhow::{anyhow, Context, Result}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt}; -use http_client::{http, AsyncBody, HttpClient, Method, Request as HttpRequest}; +use http_client::{http, AsyncBody, HttpClient, 
HttpRequestExt, Method, Request as HttpRequest}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use serde_json::{value::RawValue, Value}; @@ -262,14 +262,18 @@ pub async fn stream_chat_completion( client: &dyn HttpClient, api_url: &str, request: ChatRequest, - _: Option, + low_speed_timeout: Option, ) -> Result>> { let uri = format!("{api_url}/api/chat"); - let request_builder = http::Request::builder() + let mut request_builder = http::Request::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); + if let Some(low_speed_timeout) = low_speed_timeout { + request_builder = request_builder.read_timeout(low_speed_timeout); + } + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; if response.status().is_success() { From 8559731e0d5bb570dcbb427746d10299d12e2889 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Fri, 27 Sep 2024 14:55:35 +0800 Subject: [PATCH 366/762] project: Fix worktree store event missing in remote projects (#18376) Release Notes: - N/A --- crates/project/src/project.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index fa373af61951be..b91250e6b2c4a3 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -886,6 +886,9 @@ impl Project { cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) .detach(); + cx.subscribe(&worktree_store, Self::on_worktree_store_event) + .detach(); + cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach(); From 5199135b54c98d571af8d9cc16635fd94a0b85ac Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 27 Sep 2024 09:31:45 +0200 Subject: [PATCH 367/762] ssh remoting: Show error if opening connection timed out (#18401) This shows an error if opening a connection to a remote host didn't work in the 
timeout of 10s (maybe we'll need to make that configurable in the future? for now it seems fine.) ![screenshot-2024-09-26-18 01 07@2x](https://github.com/user-attachments/assets/cbfa0e9f-9c29-4b6c-bade-07fdd7393c9d) Release Notes: - N/A --------- Co-authored-by: Bennet Co-authored-by: Conrad --- crates/recent_projects/src/ssh_connections.rs | 55 +++++++--- crates/remote/src/ssh_session.rs | 101 +++++++++++++----- 2 files changed, 115 insertions(+), 41 deletions(-) diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 1722c58f075399..dd30f15f267fc7 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -16,8 +16,9 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; use ui::{ - h_flex, v_flex, FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, IntoElement, - Label, LabelCommon, Styled, StyledExt as _, ViewContext, VisualContext, WindowContext, + h_flex, v_flex, Color, FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, + IntoElement, Label, LabelCommon, Styled, StyledExt as _, ViewContext, VisualContext, + WindowContext, }; use workspace::{AppState, ModalView, Workspace}; @@ -79,6 +80,7 @@ impl Settings for SshSettings { pub struct SshPrompt { connection_string: SharedString, status_message: Option, + error_message: Option, prompt: Option<(SharedString, oneshot::Sender>)>, editor: View, } @@ -92,6 +94,7 @@ impl SshPrompt { Self { connection_string, status_message: None, + error_message: None, prompt: None, editor: cx.new_view(Editor::single_line), } @@ -121,6 +124,11 @@ impl SshPrompt { cx.notify(); } + pub fn set_error(&mut self, error_message: String, cx: &mut ViewContext) { + self.error_message = Some(error_message.into()); + cx.notify(); + } + pub fn confirm(&mut self, cx: &mut ViewContext) { if let Some((_, tx)) = self.prompt.take() { self.editor.update(cx, 
|editor, cx| { @@ -140,7 +148,12 @@ impl Render for SshPrompt { .child( h_flex() .gap_2() - .child( + .child(if self.error_message.is_some() { + Icon::new(IconName::XCircle) + .size(IconSize::Medium) + .color(Color::Error) + .into_any_element() + } else { Icon::new(IconName::ArrowCircle) .size(IconSize::Medium) .with_animation( @@ -149,16 +162,21 @@ impl Render for SshPrompt { |icon, delta| { icon.transform(Transformation::rotate(percentage(delta))) }, - ), - ) + ) + .into_any_element() + }) .child( Label::new(format!("ssh {}…", self.connection_string)) .size(ui::LabelSize::Large), ), ) - .when_some(self.status_message.as_ref(), |el, status| { - el.child(Label::new(status.clone())) + .when_some(self.error_message.as_ref(), |el, error| { + el.child(Label::new(error.clone())) }) + .when( + self.error_message.is_none() && self.status_message.is_some(), + |el| el.child(Label::new(self.status_message.clone().unwrap())), + ) .when_some(self.prompt.as_ref(), |el, prompt| { el.child(Label::new(prompt.0.clone())) .child(self.editor.clone()) @@ -238,6 +256,10 @@ impl remote::SshClientDelegate for SshClientDelegate { self.update_status(status, cx) } + fn set_error(&self, error: String, cx: &mut AsyncAppContext) { + self.update_error(error, cx) + } + fn get_server_binary( &self, platform: SshPlatform, @@ -270,6 +292,16 @@ impl SshClientDelegate { .ok(); } + fn update_error(&self, error: String, cx: &mut AsyncAppContext) { + self.window + .update(cx, |_, cx| { + self.ui.update(cx, |modal, cx| { + modal.set_error(error, cx); + }) + }) + .ok(); + } + async fn get_server_binary_impl( &self, platform: SshPlatform, @@ -388,7 +420,7 @@ pub async fn open_ssh_project( })? }; - let result = window + let session = window .update(cx, |workspace, cx| { cx.activate_window(); workspace.toggle_modal(cx, |cx| SshConnectionModal::new(&connection_options, cx)); @@ -400,12 +432,7 @@ pub async fn open_ssh_project( .clone(); connect_over_ssh(connection_options.clone(), ui, cx) })? 
- .await; - - if result.is_err() { - window.update(cx, |_, cx| cx.remove_window()).ok(); - } - let session = result?; + .await?; cx.update(|cx| { workspace::open_ssh_project(window, connection_options, session, app_state, paths, cx) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 06a7f810e67210..915595fd9d2957 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -129,6 +129,7 @@ pub trait SshClientDelegate { cx: &mut AsyncAppContext, ) -> oneshot::Receiver>; fn set_status(&self, status: Option<&str>, cx: &mut AsyncAppContext); + fn set_error(&self, error_message: String, cx: &mut AsyncAppContext); } type ResponseChannels = Mutex)>>>; @@ -208,16 +209,16 @@ impl SshSession { result = child_stdout.read(&mut stdout_buffer).fuse() => { match result { - Ok(len) => { - if len == 0 { - child_stdin.close().await?; - let status = remote_server_child.status().await?; - if !status.success() { - log::info!("channel exited with status: {status:?}"); - } - return Ok(()); + Ok(0) => { + child_stdin.close().await?; + outgoing_rx.close(); + let status = remote_server_child.status().await?; + if !status.success() { + log::error!("channel exited with status: {status:?}"); } - + return Ok(()); + } + Ok(len) => { if len < stdout_buffer.len() { child_stdout.read_exact(&mut stdout_buffer[len..]).await?; } @@ -419,8 +420,13 @@ impl SshSession { let mut response_channels_lock = self.response_channels.lock(); response_channels_lock.insert(MessageId(envelope.id), tx); drop(response_channels_lock); - self.outgoing_tx.unbounded_send(envelope).ok(); + let result = self.outgoing_tx.unbounded_send(envelope); async move { + if let Err(error) = &result { + log::error!("failed to send message: {}", error); + return Err(anyhow!("failed to send message: {}", error)); + } + let response = rx.await.context("connection lost")?.0; if let Some(proto::envelope::Payload::Error(error)) = &response.payload { return 
Err(RpcError::from_proto(error, type_name)); @@ -525,22 +531,25 @@ impl SshClientState { let listener = UnixListener::bind(&askpass_socket).context("failed to create askpass socket")?; - let askpass_task = cx.spawn(|mut cx| async move { - while let Ok((mut stream, _)) = listener.accept().await { - let mut buffer = Vec::new(); - let mut reader = BufReader::new(&mut stream); - if reader.read_until(b'\0', &mut buffer).await.is_err() { - buffer.clear(); - } - let password_prompt = String::from_utf8_lossy(&buffer); - if let Some(password) = delegate - .ask_password(password_prompt.to_string(), &mut cx) - .await - .context("failed to get ssh password") - .and_then(|p| p) - .log_err() - { - stream.write_all(password.as_bytes()).await.log_err(); + let askpass_task = cx.spawn({ + let delegate = delegate.clone(); + |mut cx| async move { + while let Ok((mut stream, _)) = listener.accept().await { + let mut buffer = Vec::new(); + let mut reader = BufReader::new(&mut stream); + if reader.read_until(b'\0', &mut buffer).await.is_err() { + buffer.clear(); + } + let password_prompt = String::from_utf8_lossy(&buffer); + if let Some(password) = delegate + .ask_password(password_prompt.to_string(), &mut cx) + .await + .context("failed to get ssh password") + .and_then(|p| p) + .log_err() + { + stream.write_all(password.as_bytes()).await.log_err(); + } } } }); @@ -575,7 +584,22 @@ impl SshClientState { // has completed. let stdout = master_process.stdout.as_mut().unwrap(); let mut output = Vec::new(); - stdout.read_to_end(&mut output).await?; + let connection_timeout = std::time::Duration::from_secs(10); + let result = read_with_timeout(stdout, connection_timeout, &mut output).await; + if let Err(e) = result { + let error_message = if e.kind() == std::io::ErrorKind::TimedOut { + format!( + "Failed to connect to host. 
Timed out after {:?}.", + connection_timeout + ) + } else { + format!("Failed to connect to host: {}.", e) + }; + + delegate.set_error(error_message, cx); + return Err(e.into()); + } + drop(askpass_task); if master_process.try_status()?.is_some() { @@ -716,6 +740,29 @@ impl SshClientState { } } +#[cfg(unix)] +async fn read_with_timeout( + stdout: &mut process::ChildStdout, + timeout: std::time::Duration, + output: &mut Vec, +) -> Result<(), std::io::Error> { + smol::future::or( + async { + stdout.read_to_end(output).await?; + Ok::<_, std::io::Error>(()) + }, + async { + smol::Timer::after(timeout).await; + + Err(std::io::Error::new( + std::io::ErrorKind::TimedOut, + "Read operation timed out", + )) + }, + ) + .await +} + impl Drop for SshClientState { fn drop(&mut self) { if let Err(error) = self.master_process.kill() { From 568a21a7009ea41c93a97a3382874c3f62816889 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 27 Sep 2024 10:48:34 +0300 Subject: [PATCH 368/762] Fix the numeration in line wrap docs (#18428) Follow-up of https://github.com/zed-industries/zed/pull/18412 Release Notes: - N/A Co-authored-by: Thorsten Ball --- assets/settings/default.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index b3be17ad2cedcf..82778dc8f6c484 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -535,14 +535,14 @@ // How to soft-wrap long lines of text. // Possible values: // - // 2. Prefer a single line generally, unless an overly long line is encountered. + // 1. Prefer a single line generally, unless an overly long line is encountered. // "soft_wrap": "none", // "soft_wrap": "prefer_line", // (deprecated, same as "none") - // 3. Soft wrap lines that overflow the editor. + // 2. Soft wrap lines that overflow the editor. // "soft_wrap": "editor_width", - // 4. Soft wrap lines at the preferred line length. + // 3. 
Soft wrap lines at the preferred line length. // "soft_wrap": "preferred_line_length", - // 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller). + // 4. Soft wrap lines at the preferred line length or the editor width (whichever is smaller). // "soft_wrap": "bounded", "soft_wrap": "none", // The column at which to soft-wrap lines, for buffers where soft-wrap From a1d2e1106e15aeb072827170f79950f6ce02fd8d Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 27 Sep 2024 10:51:49 +0200 Subject: [PATCH 369/762] assistant: Fix copy/cut not working when selection is empty (#18403) Release Notes: - Fixed copy/cut/paste not working in the assistant panel when selection was empty --- crates/assistant/src/assistant_panel.rs | 108 ++++++++++++++---------- 1 file changed, 63 insertions(+), 45 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index c7a06b428b9f1a..15f9e5c59dfef9 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -72,6 +72,7 @@ use std::{ time::Duration, }; use terminal_view::{terminal_panel::TerminalPanel, TerminalView}; +use text::SelectionGoal; use ui::TintColor; use ui::{ prelude::*, @@ -3438,7 +3439,7 @@ impl ContextEditor { fn copy(&mut self, _: &editor::actions::Copy, cx: &mut ViewContext) { if self.editor.read(cx).selections.count() == 1 { - let (copied_text, metadata) = self.get_clipboard_contents(cx); + let (copied_text, metadata, _) = self.get_clipboard_contents(cx); cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( copied_text, metadata, @@ -3452,11 +3453,9 @@ impl ContextEditor { fn cut(&mut self, _: &editor::actions::Cut, cx: &mut ViewContext) { if self.editor.read(cx).selections.count() == 1 { - let (copied_text, metadata) = self.get_clipboard_contents(cx); + let (copied_text, metadata, selections) = self.get_clipboard_contents(cx); self.editor.update(cx, |editor, cx| { - let 
selections = editor.selections.all::(cx); - editor.transact(cx, |this, cx| { this.change_selections(Some(Autoscroll::fit()), cx, |s| { s.select(selections); @@ -3476,52 +3475,71 @@ impl ContextEditor { cx.propagate(); } - fn get_clipboard_contents(&mut self, cx: &mut ViewContext) -> (String, CopyMetadata) { - let creases = self.editor.update(cx, |editor, cx| { - let selection = editor.selections.newest::(cx); - let selection_start = editor.selections.newest::(cx).start; + fn get_clipboard_contents( + &mut self, + cx: &mut ViewContext, + ) -> (String, CopyMetadata, Vec>) { + let (snapshot, selection, creases) = self.editor.update(cx, |editor, cx| { + let mut selection = editor.selections.newest::(cx); let snapshot = editor.buffer().read(cx).snapshot(cx); - editor.display_map.update(cx, |display_map, cx| { - display_map - .snapshot(cx) - .crease_snapshot - .creases_in_range( - MultiBufferRow(selection.start.row)..MultiBufferRow(selection.end.row + 1), - &snapshot, - ) - .filter_map(|crease| { - if let Some(metadata) = &crease.metadata { - let start = crease - .range - .start - .to_offset(&snapshot) - .saturating_sub(selection_start); - let end = crease - .range - .end - .to_offset(&snapshot) - .saturating_sub(selection_start); - - let range_relative_to_selection = start..end; - - if range_relative_to_selection.is_empty() { - None + + let is_entire_line = selection.is_empty() || editor.selections.line_mode; + if is_entire_line { + selection.start = Point::new(selection.start.row, 0); + selection.end = + cmp::min(snapshot.max_point(), Point::new(selection.start.row + 1, 0)); + selection.goal = SelectionGoal::None; + } + + let selection_start = snapshot.point_to_offset(selection.start); + + ( + snapshot.clone(), + selection.clone(), + editor.display_map.update(cx, |display_map, cx| { + display_map + .snapshot(cx) + .crease_snapshot + .creases_in_range( + MultiBufferRow(selection.start.row) + ..MultiBufferRow(selection.end.row + 1), + &snapshot, + ) + 
.filter_map(|crease| { + if let Some(metadata) = &crease.metadata { + let start = crease + .range + .start + .to_offset(&snapshot) + .saturating_sub(selection_start); + let end = crease + .range + .end + .to_offset(&snapshot) + .saturating_sub(selection_start); + + let range_relative_to_selection = start..end; + + if range_relative_to_selection.is_empty() { + None + } else { + Some(SelectedCreaseMetadata { + range_relative_to_selection, + crease: metadata.clone(), + }) + } } else { - Some(SelectedCreaseMetadata { - range_relative_to_selection, - crease: metadata.clone(), - }) + None } - } else { - None - } - }) - .collect::>() - }) + }) + .collect::>() + }), + ) }); + let selection = selection.map(|point| snapshot.point_to_offset(point)); let context = self.context.read(cx); - let selection = self.editor.read(cx).selections.newest::(cx); + let mut text = String::new(); for message in context.messages(cx) { if message.offset_range.start >= selection.range().end { @@ -3540,7 +3558,7 @@ impl ContextEditor { } } - (text, CopyMetadata { creases }) + (text, CopyMetadata { creases }, vec![selection]) } fn paste(&mut self, action: &editor::actions::Paste, cx: &mut ViewContext) { From 1c5d9c221a3ec66f496b762fcc720183240bd212 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 27 Sep 2024 11:06:48 +0200 Subject: [PATCH 370/762] Add missing shortcuts in tooltips (#18282) Fixes some missing shortcuts from Tooltips like the project search, buffer search, quick action bar, .... https://github.com/user-attachments/assets/d3a0160a-8d6e-4ddc-bf82-1fabeca42d59 This should hopefully help new users learn and discover some nice keyboard shortcuts Release Notes: - Display keyboard shortcuts inside tooltips in the project search, buffer search etc. 
--- assets/keymaps/default-linux.json | 2 +- assets/keymaps/default-macos.json | 2 +- crates/breadcrumbs/src/breadcrumbs.rs | 35 +++++--- .../quick_action_bar/src/quick_action_bar.rs | 15 +++- crates/search/src/buffer_search.rs | 80 ++++++++++++++++--- crates/search/src/project_search.rs | 78 ++++++++++++++++-- crates/search/src/search.rs | 5 +- crates/search/src/search_bar.rs | 5 +- crates/terminal_view/src/terminal_panel.rs | 20 ++++- 9 files changed, 199 insertions(+), 43 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 8d4871d95648db..d33df0274725af 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -196,7 +196,7 @@ } }, { - "context": "BufferSearchBar && in_replace", + "context": "BufferSearchBar && in_replace > Editor", "bindings": { "enter": "search::ReplaceNext", "ctrl-enter": "search::ReplaceAll" diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index a980ae14e22a95..b405ee18528431 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -232,7 +232,7 @@ } }, { - "context": "BufferSearchBar && in_replace", + "context": "BufferSearchBar && in_replace > Editor", "bindings": { "enter": "search::ReplaceNext", "cmd-enter": "search::ReplaceAll" diff --git a/crates/breadcrumbs/src/breadcrumbs.rs b/crates/breadcrumbs/src/breadcrumbs.rs index 93ebfa06435843..09b29c0436f6ed 100644 --- a/crates/breadcrumbs/src/breadcrumbs.rs +++ b/crates/breadcrumbs/src/breadcrumbs.rs @@ -1,7 +1,7 @@ use editor::Editor; use gpui::{ - Element, EventEmitter, IntoElement, ParentElement, Render, StyledText, Subscription, - ViewContext, + Element, EventEmitter, FocusableView, IntoElement, ParentElement, Render, StyledText, + Subscription, ViewContext, }; use itertools::Itertools; use std::cmp; @@ -90,17 +90,30 @@ impl Render for Breadcrumbs { ButtonLike::new("toggle outline view") .child(breadcrumbs_stack) 
.style(ButtonStyle::Transparent) - .on_click(move |_, cx| { - if let Some(editor) = editor.upgrade() { - outline::toggle(editor, &editor::actions::ToggleOutline, cx) + .on_click({ + let editor = editor.clone(); + move |_, cx| { + if let Some(editor) = editor.upgrade() { + outline::toggle(editor, &editor::actions::ToggleOutline, cx) + } } }) - .tooltip(|cx| { - Tooltip::for_action( - "Show symbol outline", - &editor::actions::ToggleOutline, - cx, - ) + .tooltip(move |cx| { + if let Some(editor) = editor.upgrade() { + let focus_handle = editor.read(cx).focus_handle(cx); + Tooltip::for_action_in( + "Show symbol outline", + &editor::actions::ToggleOutline, + &focus_handle, + cx, + ) + } else { + Tooltip::for_action( + "Show symbol outline", + &editor::actions::ToggleOutline, + cx, + ) + } }), ), None => element diff --git a/crates/quick_action_bar/src/quick_action_bar.rs b/crates/quick_action_bar/src/quick_action_bar.rs index 57418b54b7e05a..fb05065a19fc18 100644 --- a/crates/quick_action_bar/src/quick_action_bar.rs +++ b/crates/quick_action_bar/src/quick_action_bar.rs @@ -8,8 +8,8 @@ use editor::actions::{ use editor::{Editor, EditorSettings}; use gpui::{ - Action, AnchorCorner, ClickEvent, ElementId, EventEmitter, InteractiveElement, ParentElement, - Render, Styled, Subscription, View, ViewContext, WeakView, + Action, AnchorCorner, ClickEvent, ElementId, EventEmitter, FocusHandle, FocusableView, + InteractiveElement, ParentElement, Render, Styled, Subscription, View, ViewContext, WeakView, }; use search::{buffer_search, BufferSearchBar}; use settings::{Settings, SettingsStore}; @@ -110,12 +110,15 @@ impl Render for QuickActionBar { ) }; + let focus_handle = editor.read(cx).focus_handle(cx); + let search_button = editor.is_singleton(cx).then(|| { QuickActionBarButton::new( "toggle buffer search", IconName::MagnifyingGlass, !self.buffer_search_bar.read(cx).is_dismissed(), Box::new(buffer_search::Deploy::find()), + focus_handle.clone(), "Buffer Search", { let 
buffer_search_bar = self.buffer_search_bar.clone(); @@ -133,6 +136,7 @@ impl Render for QuickActionBar { IconName::ZedAssistant, false, Box::new(InlineAssist::default()), + focus_handle.clone(), "Inline Assist", { let workspace = self.workspace.clone(); @@ -321,6 +325,7 @@ struct QuickActionBarButton { icon: IconName, toggled: bool, action: Box, + focus_handle: FocusHandle, tooltip: SharedString, on_click: Box, } @@ -331,6 +336,7 @@ impl QuickActionBarButton { icon: IconName, toggled: bool, action: Box, + focus_handle: FocusHandle, tooltip: impl Into, on_click: impl Fn(&ClickEvent, &mut WindowContext) + 'static, ) -> Self { @@ -339,6 +345,7 @@ impl QuickActionBarButton { icon, toggled, action, + focus_handle, tooltip: tooltip.into(), on_click: Box::new(on_click), } @@ -355,7 +362,9 @@ impl RenderOnce for QuickActionBarButton { .icon_size(IconSize::Small) .style(ButtonStyle::Subtle) .selected(self.toggled) - .tooltip(move |cx| Tooltip::for_action(tooltip.clone(), &*action, cx)) + .tooltip(move |cx| { + Tooltip::for_action_in(tooltip.clone(), &*action, &self.focus_handle, cx) + }) .on_click(move |event, cx| (self.on_click)(event, cx)) } } diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 9ba7dfd7965b15..6e660a963b987d 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -13,9 +13,10 @@ use editor::{ }; use futures::channel::oneshot; use gpui::{ - actions, div, impl_actions, Action, AppContext, ClickEvent, EventEmitter, FocusableView, Hsla, - InteractiveElement as _, IntoElement, KeyContext, ParentElement as _, Render, ScrollHandle, - Styled, Subscription, Task, TextStyle, View, ViewContext, VisualContext as _, WindowContext, + actions, div, impl_actions, Action, AppContext, ClickEvent, EventEmitter, FocusHandle, + FocusableView, Hsla, InteractiveElement as _, IntoElement, KeyContext, ParentElement as _, + Render, ScrollHandle, Styled, Subscription, Task, TextStyle, View, ViewContext, + 
VisualContext as _, WindowContext, }; use project::{ search::SearchQuery, @@ -142,6 +143,8 @@ impl Render for BufferSearchBar { return div().id("search_bar"); } + let focus_handle = self.focus_handle(cx); + let narrow_mode = self.scroll_handle.bounds().size.width / cx.rem_size() < 340. / BASE_REM_SIZE_IN_PX; let hide_inline_icons = self.editor_needed_width @@ -217,6 +220,7 @@ impl Render for BufferSearchBar { div.children(supported_options.case.then(|| { self.render_search_option_button( SearchOptions::CASE_SENSITIVE, + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_case_sensitive(&ToggleCaseSensitive, cx) }), @@ -225,6 +229,7 @@ impl Render for BufferSearchBar { .children(supported_options.word.then(|| { self.render_search_option_button( SearchOptions::WHOLE_WORD, + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_whole_word(&ToggleWholeWord, cx) }), @@ -233,6 +238,7 @@ impl Render for BufferSearchBar { .children(supported_options.regex.then(|| { self.render_search_option_button( SearchOptions::REGEX, + focus_handle.clone(), cx.listener(|this, _, cx| this.toggle_regex(&ToggleRegex, cx)), ) })) @@ -250,7 +256,17 @@ impl Render for BufferSearchBar { })) .selected(self.replace_enabled) .size(ButtonSize::Compact) - .tooltip(|cx| Tooltip::for_action("Toggle replace", &ToggleReplace, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle replace", + &ToggleReplace, + &focus_handle, + cx, + ) + } + }), ) }) .when(supported_options.selection, |this| { @@ -268,8 +284,16 @@ impl Render for BufferSearchBar { })) .selected(self.selection_search_enabled) .size(ButtonSize::Compact) - .tooltip(|cx| { - Tooltip::for_action("Toggle search selection", &ToggleSelection, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle search selection", + &ToggleSelection, + &focus_handle, + cx, + ) + } }), ) }) @@ -280,8 +304,16 @@ impl Render for 
BufferSearchBar { IconButton::new("select-all", ui::IconName::SelectAll) .on_click(|_, cx| cx.dispatch_action(SelectAllMatches.boxed_clone())) .size(ButtonSize::Compact) - .tooltip(|cx| { - Tooltip::for_action("Select all matches", &SelectAllMatches, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Select all matches", + &SelectAllMatches, + &focus_handle, + cx, + ) + } }), ) .child(render_nav_button( @@ -289,12 +321,14 @@ impl Render for BufferSearchBar { self.active_match_index.is_some(), "Select previous match", &SelectPrevMatch, + focus_handle.clone(), )) .child(render_nav_button( ui::IconName::ChevronRight, self.active_match_index.is_some(), "Select next match", &SelectNextMatch, + focus_handle.clone(), )) .when(!narrow_mode, |this| { this.child(h_flex().ml_2().min_w(rems_from_px(40.)).child( @@ -335,8 +369,16 @@ impl Render for BufferSearchBar { .flex_none() .child( IconButton::new("search-replace-next", ui::IconName::ReplaceNext) - .tooltip(move |cx| { - Tooltip::for_action("Replace next", &ReplaceNext, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Replace next match", + &ReplaceNext, + &focus_handle, + cx, + ) + } }) .on_click( cx.listener(|this, _, cx| this.replace_next(&ReplaceNext, cx)), @@ -344,8 +386,16 @@ impl Render for BufferSearchBar { ) .child( IconButton::new("search-replace-all", ui::IconName::ReplaceAll) - .tooltip(move |cx| { - Tooltip::for_action("Replace all", &ReplaceAll, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Replace all matches", + &ReplaceAll, + &focus_handle, + cx, + ) + } }) .on_click( cx.listener(|this, _, cx| this.replace_all(&ReplaceAll, cx)), @@ -719,10 +769,11 @@ impl BufferSearchBar { fn render_search_option_button( &self, option: SearchOptions, + focus_handle: FocusHandle, action: impl Fn(&ClickEvent, &mut WindowContext) + 'static, ) -> impl IntoElement { 
let is_active = self.search_options.contains(option); - option.as_button(is_active, action) + option.as_button(is_active, focus_handle, action) } pub fn focus_editor(&mut self, _: &FocusEditor, cx: &mut ViewContext) { @@ -1122,6 +1173,7 @@ impl BufferSearchBar { }); cx.focus(handle); } + fn toggle_replace(&mut self, _: &ToggleReplace, cx: &mut ViewContext) { if self.active_searchable_item.is_some() { self.replace_enabled = !self.replace_enabled; @@ -1134,6 +1186,7 @@ impl BufferSearchBar { cx.notify(); } } + fn replace_next(&mut self, _: &ReplaceNext, cx: &mut ViewContext) { let mut should_propagate = true; if !self.dismissed && self.active_search.is_some() { @@ -1161,6 +1214,7 @@ impl BufferSearchBar { cx.stop_propagation(); } } + pub fn replace_all(&mut self, _: &ReplaceAll, cx: &mut ViewContext) { if !self.dismissed && self.active_search.is_some() { if let Some(searchable_item) = self.active_searchable_item.as_ref() { diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index ea94d27daf61d5..12e6ccc12dc496 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -1551,6 +1551,7 @@ impl Render for ProjectSearchBar { return div(); }; let search = search.read(cx); + let focus_handle = search.focus_handle(cx); let query_column = h_flex() .flex_1() @@ -1571,18 +1572,21 @@ impl Render for ProjectSearchBar { h_flex() .child(SearchOptions::CASE_SENSITIVE.as_button( self.is_option_enabled(SearchOptions::CASE_SENSITIVE, cx), + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_search_option(SearchOptions::CASE_SENSITIVE, cx); }), )) .child(SearchOptions::WHOLE_WORD.as_button( self.is_option_enabled(SearchOptions::WHOLE_WORD, cx), + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_search_option(SearchOptions::WHOLE_WORD, cx); }), )) .child(SearchOptions::REGEX.as_button( self.is_option_enabled(SearchOptions::REGEX, cx), + focus_handle.clone(), cx.listener(|this, _, cx| { 
this.toggle_search_option(SearchOptions::REGEX, cx); }), @@ -1603,7 +1607,17 @@ impl Render for ProjectSearchBar { .map(|search| search.read(cx).filters_enabled) .unwrap_or_default(), ) - .tooltip(|cx| Tooltip::for_action("Toggle filters", &ToggleFilters, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle filters", + &ToggleFilters, + &focus_handle, + cx, + ) + } + }), ) .child( IconButton::new("project-search-toggle-replace", IconName::Replace) @@ -1616,7 +1630,17 @@ impl Render for ProjectSearchBar { .map(|search| search.read(cx).replace_enabled) .unwrap_or_default(), ) - .tooltip(|cx| Tooltip::for_action("Toggle replace", &ToggleReplace, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle replace", + &ToggleReplace, + &focus_handle, + cx, + ) + } + }), ), ); @@ -1650,8 +1674,16 @@ impl Render for ProjectSearchBar { }) } })) - .tooltip(|cx| { - Tooltip::for_action("Go to previous match", &SelectPrevMatch, cx) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Go to previous match", + &SelectPrevMatch, + &focus_handle, + cx, + ) + } }), ) .child( @@ -1664,7 +1696,17 @@ impl Render for ProjectSearchBar { }) } })) - .tooltip(|cx| Tooltip::for_action("Go to next match", &SelectNextMatch, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Go to next match", + &SelectNextMatch, + &focus_handle, + cx, + ) + } + }), ) .child( h_flex() @@ -1702,6 +1744,7 @@ impl Render for ProjectSearchBar { .border_color(cx.theme().colors().border) .rounded_lg() .child(self.render_text_input(&search.replacement_editor, cx)); + let focus_handle = search.replacement_editor.read(cx).focus_handle(cx); let replace_actions = h_flex().when(search.replace_enabled, |this| { this.child( IconButton::new("project-search-replace-next", IconName::ReplaceNext) @@ -1712,7 +1755,17 @@ 
impl Render for ProjectSearchBar { }) } })) - .tooltip(|cx| Tooltip::for_action("Replace next match", &ReplaceNext, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Replace next match", + &ReplaceNext, + &focus_handle, + cx, + ) + } + }), ) .child( IconButton::new("project-search-replace-all", IconName::ReplaceAll) @@ -1723,7 +1776,17 @@ impl Render for ProjectSearchBar { }) } })) - .tooltip(|cx| Tooltip::for_action("Replace all matches", &ReplaceAll, cx)), + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Replace all matches", + &ReplaceAll, + &focus_handle, + cx, + ) + } + }), ) }); h_flex() @@ -1790,6 +1853,7 @@ impl Render for ProjectSearchBar { search .search_options .contains(SearchOptions::INCLUDE_IGNORED), + focus_handle.clone(), cx.listener(|this, _, cx| { this.toggle_search_option(SearchOptions::INCLUDE_IGNORED, cx); }), diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index b99672c532d6bb..d13a12576b0f94 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -1,7 +1,7 @@ use bitflags::bitflags; pub use buffer_search::BufferSearchBar; use editor::SearchSettings; -use gpui::{actions, Action, AppContext, IntoElement}; +use gpui::{actions, Action, AppContext, FocusHandle, IntoElement}; use project::search::SearchQuery; pub use project_search::ProjectSearchView; use ui::{prelude::*, Tooltip}; @@ -106,6 +106,7 @@ impl SearchOptions { pub fn as_button( &self, active: bool, + focus_handle: FocusHandle, action: impl Fn(&gpui::ClickEvent, &mut WindowContext) + 'static, ) -> impl IntoElement { IconButton::new(self.label(), self.icon()) @@ -115,7 +116,7 @@ impl SearchOptions { .tooltip({ let action = self.to_toggle_action(); let label = self.label(); - move |cx| Tooltip::for_action(label, &*action, cx) + move |cx| Tooltip::for_action_in(label, &*action, &focus_handle, cx) }) } } diff --git 
a/crates/search/src/search_bar.rs b/crates/search/src/search_bar.rs index 0594036c25483c..102f04c4b95c28 100644 --- a/crates/search/src/search_bar.rs +++ b/crates/search/src/search_bar.rs @@ -1,4 +1,4 @@ -use gpui::{Action, IntoElement}; +use gpui::{Action, FocusHandle, IntoElement}; use ui::IconButton; use ui::{prelude::*, Tooltip}; @@ -7,12 +7,13 @@ pub(super) fn render_nav_button( active: bool, tooltip: &'static str, action: &'static dyn Action, + focus_handle: FocusHandle, ) -> impl IntoElement { IconButton::new( SharedString::from(format!("search-nav-button-{}", action.name())), icon, ) .on_click(|_, cx| cx.dispatch_action(action.boxed_clone())) - .tooltip(move |cx| Tooltip::for_action(tooltip, action, cx)) + .tooltip(move |cx| Tooltip::for_action_in(tooltip, action, &focus_handle, cx)) .disabled(!active) } diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 72f8606fa21765..7d95613804414c 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -166,7 +166,16 @@ impl TerminalPanel { pub fn asssistant_enabled(&mut self, enabled: bool, cx: &mut ViewContext) { self.assistant_enabled = enabled; if enabled { - self.assistant_tab_bar_button = Some(cx.new_view(|_| InlineAssistTabBarButton).into()); + let focus_handle = self + .pane + .read(cx) + .active_item() + .map(|item| item.focus_handle(cx)) + .unwrap_or(self.focus_handle(cx)); + self.assistant_tab_bar_button = Some( + cx.new_view(move |_| InlineAssistTabBarButton { focus_handle }) + .into(), + ); } else { self.assistant_tab_bar_button = None; } @@ -859,16 +868,21 @@ impl Panel for TerminalPanel { } } -struct InlineAssistTabBarButton; +struct InlineAssistTabBarButton { + focus_handle: FocusHandle, +} impl Render for InlineAssistTabBarButton { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let focus_handle = self.focus_handle.clone(); IconButton::new("terminal_inline_assistant", 
IconName::ZedAssistant) .icon_size(IconSize::Small) .on_click(cx.listener(|_, _, cx| { cx.dispatch_action(InlineAssist::default().boxed_clone()); })) - .tooltip(move |cx| Tooltip::for_action("Inline Assist", &InlineAssist::default(), cx)) + .tooltip(move |cx| { + Tooltip::for_action_in("Inline Assist", &InlineAssist::default(), &focus_handle, cx) + }) } } From 73ff8c0f1fd19ffcf93bc0bc2b8ba9d05f5eb935 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 27 Sep 2024 14:16:14 +0200 Subject: [PATCH 371/762] Fix missing tooltips for selected buttons (#18435) Reverts #13857. Hiding tooltips for selected buttons prevents tooltips like "Close x dock" from showing up, see #14938 for an example. The intention of the original PR was to hide the "Show application menu" tooltip, while the context menu is open. In order to fix this without breaking other UI elements, we track the state of the context menu using `PopoverMenuHandle` now, which allows us to prevent the tooltip from showing up while the context menu is open. 
Closes #14938 Release Notes: - Fixed an issue where some tooltips would not show up --- crates/storybook/src/story_selector.rs | 4 +++- crates/title_bar/src/application_menu.rs | 24 ++++++++++++------- .../title_bar/src/stories/application_menu.rs | 16 ++++++++++--- crates/title_bar/src/title_bar.rs | 22 ++++++++++------- .../ui/src/components/button/button_like.rs | 6 ++--- 5 files changed, 47 insertions(+), 25 deletions(-) diff --git a/crates/storybook/src/story_selector.rs b/crates/storybook/src/story_selector.rs index 881fd83f8f21b9..3a1c2f56306d89 100644 --- a/crates/storybook/src/story_selector.rs +++ b/crates/storybook/src/story_selector.rs @@ -46,7 +46,9 @@ pub enum ComponentStory { impl ComponentStory { pub fn story(&self, cx: &mut WindowContext) -> AnyView { match self { - Self::ApplicationMenu => cx.new_view(|_| title_bar::ApplicationMenuStory).into(), + Self::ApplicationMenu => cx + .new_view(|cx| title_bar::ApplicationMenuStory::new(cx)) + .into(), Self::AutoHeightEditor => AutoHeightEditorStory::new(cx).into(), Self::Avatar => cx.new_view(|_| ui::AvatarStory).into(), Self::Button => cx.new_view(|_| ui::ButtonStory).into(), diff --git a/crates/title_bar/src/application_menu.rs b/crates/title_bar/src/application_menu.rs index 47d4818da5e92b..13ee10c141073e 100644 --- a/crates/title_bar/src/application_menu.rs +++ b/crates/title_bar/src/application_menu.rs @@ -1,16 +1,19 @@ -use ui::{prelude::*, ContextMenu, NumericStepper, PopoverMenu, Tooltip}; +use ui::{prelude::*, ContextMenu, NumericStepper, PopoverMenu, PopoverMenuHandle, Tooltip}; -#[derive(IntoElement)] -pub struct ApplicationMenu; +pub struct ApplicationMenu { + context_menu_handle: PopoverMenuHandle, +} impl ApplicationMenu { - pub fn new() -> Self { - Self + pub fn new(_: &mut ViewContext) -> Self { + Self { + context_menu_handle: PopoverMenuHandle::default(), + } } } -impl RenderOnce for ApplicationMenu { - fn render(self, _cx: &mut WindowContext) -> impl IntoElement { +impl Render for 
ApplicationMenu { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { PopoverMenu::new("application-menu") .menu(move |cx| { ContextMenu::build(cx, move |menu, cx| { @@ -125,9 +128,12 @@ impl RenderOnce for ApplicationMenu { .trigger( IconButton::new("application-menu", ui::IconName::Menu) .style(ButtonStyle::Subtle) - .tooltip(|cx| Tooltip::text("Open Application Menu", cx)) - .icon_size(IconSize::Small), + .icon_size(IconSize::Small) + .when(!self.context_menu_handle.is_deployed(), |this| { + this.tooltip(|cx| Tooltip::text("Open Application Menu", cx)) + }), ) + .with_handle(self.context_menu_handle.clone()) .into_any_element() } } diff --git a/crates/title_bar/src/stories/application_menu.rs b/crates/title_bar/src/stories/application_menu.rs index 0b804209fd1a33..c3f8c700ae6f59 100644 --- a/crates/title_bar/src/stories/application_menu.rs +++ b/crates/title_bar/src/stories/application_menu.rs @@ -1,11 +1,21 @@ -use gpui::Render; +use gpui::{Render, View}; use story::{Story, StoryItem, StorySection}; use ui::prelude::*; use crate::application_menu::ApplicationMenu; -pub struct ApplicationMenuStory; +pub struct ApplicationMenuStory { + menu: View, +} + +impl ApplicationMenuStory { + pub fn new(cx: &mut WindowContext) -> Self { + Self { + menu: cx.new_view(ApplicationMenu::new), + } + } +} impl Render for ApplicationMenuStory { fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { @@ -13,7 +23,7 @@ impl Render for ApplicationMenuStory { .child(Story::title_for::()) .child(StorySection::new().child(StoryItem::new( "Application Menu", - h_flex().child(ApplicationMenu::new()), + h_flex().child(self.menu.clone()), ))) } } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index e2d45a923b7d06..73a82e9ee06201 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -15,7 +15,7 @@ use feature_flags::{FeatureFlagAppExt, ZedPro}; use gpui::{ actions, div, px, Action, AnyElement, 
AppContext, Decorations, Element, InteractiveElement, Interactivity, IntoElement, Model, MouseButton, ParentElement, Render, Stateful, - StatefulInteractiveElement, Styled, Subscription, ViewContext, VisualContext, WeakView, + StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView, }; use project::{Project, RepositoryEntry}; use recent_projects::RecentProjects; @@ -65,6 +65,7 @@ pub struct TitleBar { client: Arc, workspace: WeakView, should_move: bool, + application_menu: Option>, _subscriptions: Vec, } @@ -131,12 +132,7 @@ impl Render for TitleBar { .child( h_flex() .gap_1() - .children(match self.platform_style { - PlatformStyle::Mac => None, - PlatformStyle::Linux | PlatformStyle::Windows => { - Some(ApplicationMenu::new()) - } - }) + .when_some(self.application_menu.clone(), |this, menu| this.child(menu)) .children(self.render_project_host(cx)) .child(self.render_project_name(cx)) .children(self.render_project_branch(cx)) @@ -215,6 +211,15 @@ impl TitleBar { let user_store = workspace.app_state().user_store.clone(); let client = workspace.app_state().client.clone(); let active_call = ActiveCall::global(cx); + + let platform_style = PlatformStyle::platform(); + let application_menu = match platform_style { + PlatformStyle::Mac => None, + PlatformStyle::Linux | PlatformStyle::Windows => { + Some(cx.new_view(ApplicationMenu::new)) + } + }; + let mut subscriptions = Vec::new(); subscriptions.push( cx.observe(&workspace.weak_handle().upgrade().unwrap(), |_, _, cx| { @@ -227,9 +232,10 @@ impl TitleBar { subscriptions.push(cx.observe(&user_store, |_, _, cx| cx.notify())); Self { - platform_style: PlatformStyle::platform(), + platform_style, content: div().id(id.into()), children: SmallVec::new(), + application_menu, workspace: workspace.weak_handle(), should_move: false, project, diff --git a/crates/ui/src/components/button/button_like.rs b/crates/ui/src/components/button/button_like.rs index 625875e4c9eb92..a22c27d24176d0 100644 
--- a/crates/ui/src/components/button/button_like.rs +++ b/crates/ui/src/components/button/button_like.rs @@ -523,10 +523,8 @@ impl RenderOnce for ButtonLike { }) }, ) - .when(!self.selected, |this| { - this.when_some(self.tooltip, |this, tooltip| { - this.tooltip(move |cx| tooltip(cx)) - }) + .when_some(self.tooltip, |this, tooltip| { + this.tooltip(move |cx| tooltip(cx)) }) .children(self.children) } From 03c7f085812a2ffddbf0673f04804851f11230ce Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 27 Sep 2024 10:29:49 -0400 Subject: [PATCH 372/762] docs: Ollama api_url improvements (#18440) --- docs/src/assistant/configuration.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index 17b52a27d88010..9d9c62d8c6bc56 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -124,7 +124,7 @@ Download and install Ollama from [ollama.com/download](https://ollama.com/downlo 3. In the assistant panel, select one of the Ollama models using the model dropdown. -4. (Optional) Specify a [custom api_url](#custom-endpoint) or [custom `low_speed_timeout_in_seconds`](#provider-timeout) if required. +4. (Optional) Specify an [`api_url`](#custom-endpoint) or [`low_speed_timeout_in_seconds`](#provider-timeout) if required. #### Ollama Context Length {#ollama-context} @@ -138,6 +138,7 @@ Depending on your hardware or use-case you may wish to limit or increase the con { "language_models": { "ollama": { + "api_url": "http://localhost:11434", "low_speed_timeout_in_seconds": 120, "available_models": [ { From dc5ffe6994b0f2467164a7e1dd77dada96b64980 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=96mer=20Sinan=20A=C4=9Facan?= Date: Fri, 27 Sep 2024 16:55:03 +0200 Subject: [PATCH 373/762] Fix GoToDefinition changing the viewport unnecessarily (#18441) Closes #10738. 
Release Notes: - Fixed `GoToDefinition` changing the viewport (scrolling up/down) even when the definition is already within the viewport. ([#10738](https://github.com/zed-industries/zed/issues/10738)) --- crates/editor/src/editor.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 44de6014ec8137..cfffa584b6c214 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -9697,7 +9697,7 @@ impl Editor { if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() { let buffer = target.buffer.read(cx); let range = check_multiline_range(buffer, range); - editor.change_selections(Some(Autoscroll::focused()), cx, |s| { + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { s.select_ranges([range]); }); } else { From 6d4ecac6100f7908278e78cb8c5102f7f91c54c5 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Fri, 27 Sep 2024 10:59:19 -0400 Subject: [PATCH 374/762] Add a `get-release-notes-since` script (#18445) Release Notes: - N/A --- ...-changes-since => get-pull-requests-since} | 4 +- script/get-release-notes-since | 125 ++++++++++++++++++ 2 files changed, 127 insertions(+), 2 deletions(-) rename script/{get-changes-since => get-pull-requests-since} (94%) create mode 100755 script/get-release-notes-since diff --git a/script/get-changes-since b/script/get-pull-requests-since similarity index 94% rename from script/get-changes-since rename to script/get-pull-requests-since index 3b889ce991ade6..c8509480a6c5cd 100755 --- a/script/get-changes-since +++ b/script/get-pull-requests-since @@ -22,12 +22,12 @@ async function main() { const startDate = new Date(process.argv[2]); const today = new Date(); - console.log(`Changes from ${startDate} to ${today}\n`); + console.log(`Pull requests from ${startDate} to ${today}\n`); let pullRequestNumbers = getPullRequestNumbers(startDate, today); // Fetch the pull requests from the GitHub API. 
- console.log("Merged Pull requests:"); + console.log("Merged pull requests:"); for (const pullRequestNumber of pullRequestNumbers) { const webURL = `https://github.com/zed-industries/zed/pull/${pullRequestNumber}`; const apiURL = `https://api.github.com/repos/zed-industries/zed/pulls/${pullRequestNumber}`; diff --git a/script/get-release-notes-since b/script/get-release-notes-since new file mode 100755 index 00000000000000..20a6fc18dee9e3 --- /dev/null +++ b/script/get-release-notes-since @@ -0,0 +1,125 @@ +#!/usr/bin/env node --redirect-warnings=/dev/null + +const { execFileSync } = require("child_process"); +const { GITHUB_ACCESS_TOKEN } = process.env; + +main(); + +async function main() { + const startDate = new Date(process.argv[2]); + const today = new Date(); + + console.log(`Release notes from ${startDate} to ${today}\n`); + + const releases = await getReleases(startDate, today); + const previewReleases = releases.filter((release) => + release.tagName.includes("-pre"), + ); + + const stableReleases = releases.filter( + (release) => !release.tagName.includes("-pre"), + ); + + // Filter out all preview release, as all of those changes have made it to the stable release, except for the latest preview release + const aggregatedReleases = stableReleases + .concat(previewReleases[0]) + .reverse(); + + const aggregatedReleaseTitles = aggregatedReleases + .map((release) => release.name) + .join(", "); + + console.log(); + console.log(`Release titles: ${aggregatedReleaseTitles}`); + + console.log("Release notes:"); + console.log(); + + for (const release of aggregatedReleases) { + const publishedDate = release.publishedAt.split("T")[0]; + console.log(`${release.name}: ${publishedDate}`); + console.log(); + console.log(release.description); + console.log(); + } +} + +async function getReleases(startDate, endDate) { + const query = ` + query ($owner: String!, $repo: String!, $cursor: String) { + repository(owner: $owner, name: $repo) { + releases(first: 100, orderBy: 
{field: CREATED_AT, direction: DESC}, after: $cursor) { + nodes { + tagName + name + createdAt + publishedAt + description + url + author { + login + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } + `; + + let allReleases = []; + let hasNextPage = true; + let cursor = null; + + while (hasNextPage) { + const response = await fetch("https://api.github.com/graphql", { + method: "POST", + headers: { + Authorization: `Bearer ${GITHUB_ACCESS_TOKEN}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + query, + variables: { owner: "zed-industries", repo: "zed", cursor }, + }), + }); + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + const data = await response.json(); + + if (data.errors) { + throw new Error(`GraphQL error: ${JSON.stringify(data.errors)}`); + } + + if (!data.data || !data.data.repository || !data.data.repository.releases) { + throw new Error(`Unexpected response structure: ${JSON.stringify(data)}`); + } + + const releases = data.data.repository.releases.nodes; + allReleases = allReleases.concat(releases); + + hasNextPage = data.data.repository.releases.pageInfo.hasNextPage; + cursor = data.data.repository.releases.pageInfo.endCursor; + + lastReleaseOnPage = releases[releases.length - 1]; + + if ( + releases.length > 0 && + new Date(lastReleaseOnPage.createdAt) < startDate + ) { + break; + } + } + + const filteredReleases = allReleases.filter((release) => { + const releaseDate = new Date(release.createdAt); + return releaseDate >= startDate && releaseDate <= endDate; + }); + + return filteredReleases; +} From ffd1083cc185ddd13760391dc81231c18236e912 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Sep 2024 10:06:19 -0600 Subject: [PATCH 375/762] vim: Command selection fixes (#18424) Release Notes: - vim: Fixed cursor position after `:{range}yank`. 
- vim: Added `:fo[ld]`, `:foldo[pen]` and `:foldc[lose]` --- crates/vim/src/command.rs | 260 +++++++++++++++++++++++----------- crates/vim/src/indent.rs | 8 +- crates/vim/src/normal.rs | 6 +- crates/vim/src/normal/yank.rs | 32 ++++- crates/vim/src/visual.rs | 14 +- 5 files changed, 218 insertions(+), 102 deletions(-) diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 49e739faadfeaf..605bc3a05e43c8 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1,4 +1,9 @@ -use std::{iter::Peekable, ops::Range, str::Chars, sync::OnceLock}; +use std::{ + iter::Peekable, + ops::{Deref, Range}, + str::Chars, + sync::OnceLock, +}; use anyhow::{anyhow, Result}; use command_palette_hooks::CommandInterceptResult; @@ -21,7 +26,7 @@ use crate::{ JoinLines, }, state::Mode, - visual::{VisualDeleteLine, VisualYankLine}, + visual::VisualDeleteLine, Vim, }; @@ -30,38 +35,55 @@ pub struct GoToLine { range: CommandRange, } -#[derive(Debug)] +#[derive(Debug, Clone, PartialEq, Deserialize)] +pub struct YankCommand { + range: CommandRange, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] pub struct WithRange { - is_count: bool, + restore_selection: bool, range: CommandRange, - action: Box, + action: WrappedAction, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +pub struct WithCount { + count: u32, + action: WrappedAction, } +#[derive(Debug)] +struct WrappedAction(Box); + actions!(vim, [VisualCommand, CountCommand]); -impl_actions!(vim, [GoToLine, WithRange]); +impl_actions!(vim, [GoToLine, YankCommand, WithRange, WithCount]); -impl<'de> Deserialize<'de> for WithRange { +impl<'de> Deserialize<'de> for WrappedAction { fn deserialize(_: D) -> Result where D: serde::Deserializer<'de>, { - Err(serde::de::Error::custom("Cannot deserialize WithRange")) + Err(serde::de::Error::custom("Cannot deserialize WrappedAction")) } } -impl PartialEq for WithRange { +impl PartialEq for WrappedAction { fn eq(&self, other: &Self) -> bool { - self.range == 
other.range && self.action.partial_eq(&*other.action) + self.0.partial_eq(&*other.0) } } -impl Clone for WithRange { +impl Clone for WrappedAction { fn clone(&self) -> Self { - Self { - is_count: self.is_count, - range: self.range.clone(), - action: self.action.boxed_clone(), - } + Self(self.0.boxed_clone()) + } +} + +impl Deref for WrappedAction { + type Target = dyn Action; + fn deref(&self) -> &dyn Action { + &*self.0 } } @@ -110,13 +132,33 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.move_cursor(Motion::StartOfDocument, Some(buffer_row.0 as usize + 1), cx); }); - Vim::action(editor, cx, |vim, action: &WithRange, cx| { - if action.is_count { - for _ in 0..action.range.as_count() { - cx.dispatch_action(action.action.boxed_clone()) + Vim::action(editor, cx, |vim, action: &YankCommand, cx| { + vim.update_editor(cx, |vim, editor, cx| { + let snapshot = editor.snapshot(cx); + if let Ok(range) = action.range.buffer_range(vim, editor, cx) { + let end = if range.end < snapshot.max_buffer_row() { + Point::new(range.end.0 + 1, 0) + } else { + snapshot.buffer_snapshot.max_point() + }; + vim.copy_ranges( + editor, + true, + true, + vec![Point::new(range.start.0, 0)..end], + cx, + ) } - return; + }); + }); + + Vim::action(editor, cx, |_, action: &WithCount, cx| { + for _ in 0..action.count { + cx.dispatch_action(action.action.boxed_clone()) } + }); + + Vim::action(editor, cx, |vim, action: &WithRange, cx| { let result = vim.update_editor(cx, |vim, editor, cx| { action.range.buffer_range(vim, editor, cx) }); @@ -134,31 +176,51 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { } Some(Ok(result)) => result, }; - vim.update_editor(cx, |_, editor, cx| { - editor.change_selections(None, cx, |s| { - let end = Point::new(range.end.0, s.buffer().line_len(range.end)); - s.select_ranges([end..Point::new(range.start.0, 0)]); + + let previous_selections = vim + .update_editor(cx, |_, editor, cx| { + let selections = action + .restore_selection + 
.then(|| editor.selections.disjoint_anchor_ranges()); + editor.change_selections(None, cx, |s| { + let end = Point::new(range.end.0, s.buffer().line_len(range.end)); + s.select_ranges([end..Point::new(range.start.0, 0)]); + }); + selections }) - }); + .flatten(); cx.dispatch_action(action.action.boxed_clone()); cx.defer(move |vim, cx| { vim.update_editor(cx, |_, editor, cx| { editor.change_selections(None, cx, |s| { - s.select_ranges([Point::new(range.start.0, 0)..Point::new(range.start.0, 0)]); + if let Some(previous_selections) = previous_selections { + s.select_ranges(previous_selections); + } else { + s.select_ranges([ + Point::new(range.start.0, 0)..Point::new(range.start.0, 0) + ]); + } }) }); }); }); } -#[derive(Debug, Default)] +#[derive(Default)] struct VimCommand { prefix: &'static str, suffix: &'static str, action: Option>, action_name: Option<&'static str>, bang_action: Option>, - has_range: bool, + range: Option< + Box< + dyn Fn(Box, &CommandRange) -> Option> + + Send + + Sync + + 'static, + >, + >, has_count: bool, } @@ -187,16 +249,25 @@ impl VimCommand { self } - fn range(mut self) -> Self { - self.has_range = true; + fn range( + mut self, + f: impl Fn(Box, &CommandRange) -> Option> + Send + Sync + 'static, + ) -> Self { + self.range = Some(Box::new(f)); self } + fn count(mut self) -> Self { self.has_count = true; self } - fn parse(&self, mut query: &str, cx: &AppContext) -> Option> { + fn parse( + &self, + mut query: &str, + range: &Option, + cx: &AppContext, + ) -> Option> { let has_bang = query.ends_with('!'); if has_bang { query = &query[..query.len() - 1]; @@ -207,14 +278,20 @@ impl VimCommand { return None; } - if has_bang && self.bang_action.is_some() { - Some(self.bang_action.as_ref().unwrap().boxed_clone()) + let action = if has_bang && self.bang_action.is_some() { + self.bang_action.as_ref().unwrap().boxed_clone() } else if let Some(action) = self.action.as_ref() { - Some(action.boxed_clone()) + action.boxed_clone() } else if let 
Some(action_name) = self.action_name { - cx.build_action(action_name, None).log_err() + cx.build_action(action_name, None).log_err()? } else { - None + return None; + }; + + if let Some(range) = range { + self.range.as_ref().and_then(|f| f(action, range)) + } else { + Some(action) } } @@ -405,27 +482,17 @@ impl CommandRange { } } - pub fn as_count(&self) -> u32 { + pub fn as_count(&self) -> Option { if let CommandRange { start: Position::Line { row, offset: 0 }, end: None, } = &self { - *row + Some(*row) } else { - 0 + None } } - - pub fn is_count(&self) -> bool { - matches!( - &self, - CommandRange { - start: Position::Line { row: _, offset: 0 }, - end: None - } - ) - } } fn generate_commands(_: &AppContext) -> Vec { @@ -578,18 +645,32 @@ fn generate_commands(_: &AppContext) -> Vec { VimCommand::str(("cl", "ist"), "diagnostics::Deploy"), VimCommand::new(("cc", ""), editor::actions::Hover), VimCommand::new(("ll", ""), editor::actions::Hover), - VimCommand::new(("cn", "ext"), editor::actions::GoToDiagnostic).count(), - VimCommand::new(("cp", "revious"), editor::actions::GoToPrevDiagnostic).count(), - VimCommand::new(("cN", "ext"), editor::actions::GoToPrevDiagnostic).count(), - VimCommand::new(("lp", "revious"), editor::actions::GoToPrevDiagnostic).count(), - VimCommand::new(("lN", "ext"), editor::actions::GoToPrevDiagnostic).count(), - VimCommand::new(("j", "oin"), JoinLines).range(), - VimCommand::new(("dif", "fupdate"), editor::actions::ToggleHunkDiff).range(), - VimCommand::new(("rev", "ert"), editor::actions::RevertSelectedHunks).range(), - VimCommand::new(("d", "elete"), VisualDeleteLine).range(), - VimCommand::new(("y", "ank"), VisualYankLine).range(), - VimCommand::new(("sor", "t"), SortLinesCaseSensitive).range(), - VimCommand::new(("sort i", ""), SortLinesCaseInsensitive).range(), + VimCommand::new(("cn", "ext"), editor::actions::GoToDiagnostic).range(wrap_count), + VimCommand::new(("cp", "revious"), editor::actions::GoToPrevDiagnostic).range(wrap_count), 
+ VimCommand::new(("cN", "ext"), editor::actions::GoToPrevDiagnostic).range(wrap_count), + VimCommand::new(("lp", "revious"), editor::actions::GoToPrevDiagnostic).range(wrap_count), + VimCommand::new(("lN", "ext"), editor::actions::GoToPrevDiagnostic).range(wrap_count), + VimCommand::new(("j", "oin"), JoinLines).range(select_range), + VimCommand::new(("fo", "ld"), editor::actions::FoldSelectedRanges).range(act_on_range), + VimCommand::new(("foldo", "pen"), editor::actions::UnfoldLines) + .bang(editor::actions::UnfoldRecursive) + .range(act_on_range), + VimCommand::new(("foldc", "lose"), editor::actions::Fold) + .bang(editor::actions::FoldRecursive) + .range(act_on_range), + VimCommand::new(("dif", "fupdate"), editor::actions::ToggleHunkDiff).range(act_on_range), + VimCommand::new(("rev", "ert"), editor::actions::RevertSelectedHunks).range(act_on_range), + VimCommand::new(("d", "elete"), VisualDeleteLine).range(select_range), + VimCommand::new(("y", "ank"), gpui::NoAction).range(|_, range| { + Some( + YankCommand { + range: range.clone(), + } + .boxed_clone(), + ) + }), + VimCommand::new(("sor", "t"), SortLinesCaseSensitive).range(select_range), + VimCommand::new(("sort i", ""), SortLinesCaseInsensitive).range(select_range), VimCommand::str(("E", "xplore"), "project_panel::ToggleFocus"), VimCommand::str(("H", "explore"), "project_panel::ToggleFocus"), VimCommand::str(("L", "explore"), "project_panel::ToggleFocus"), @@ -620,6 +701,38 @@ fn commands(cx: &AppContext) -> &Vec { .0 } +fn act_on_range(action: Box, range: &CommandRange) -> Option> { + Some( + WithRange { + restore_selection: true, + range: range.clone(), + action: WrappedAction(action), + } + .boxed_clone(), + ) +} + +fn select_range(action: Box, range: &CommandRange) -> Option> { + Some( + WithRange { + restore_selection: false, + range: range.clone(), + action: WrappedAction(action), + } + .boxed_clone(), + ) +} + +fn wrap_count(action: Box, range: &CommandRange) -> Option> { + 
range.as_count().map(|count| { + WithCount { + count, + action: WrappedAction(action), + } + .boxed_clone() + }) +} + pub fn command_interceptor(mut input: &str, cx: &AppContext) -> Option { // NOTE: We also need to support passing arguments to commands like :w // (ideally with filename autocompletion). @@ -679,25 +792,12 @@ pub fn command_interceptor(mut input: &str, cx: &AppContext) -> Option) { vim.store_visual_marks(cx); vim.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { - let mut original_positions = vim.save_selection_starts(editor, cx); + let original_positions = vim.save_selection_starts(editor, cx); for _ in 0..count { editor.indent(&Default::default(), cx); } - vim.restore_selection_cursors(editor, cx, &mut original_positions); + vim.restore_selection_cursors(editor, cx, original_positions); }); }); if vim.mode.is_visual() { @@ -38,11 +38,11 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.store_visual_marks(cx); vim.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { - let mut original_positions = vim.save_selection_starts(editor, cx); + let original_positions = vim.save_selection_starts(editor, cx); for _ in 0..count { editor.outdent(&Default::default(), cx); } - vim.restore_selection_cursors(editor, cx, &mut original_positions); + vim.restore_selection_cursors(editor, cx, original_positions); }); }); if vim.mode.is_visual() { diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 10bf3c8e8d73b4..4a4927a2fc5bfd 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -395,9 +395,9 @@ impl Vim { self.store_visual_marks(cx); self.update_editor(cx, |vim, editor, cx| { editor.transact(cx, |editor, cx| { - let mut original_positions = vim.save_selection_starts(editor, cx); + let original_positions = vim.save_selection_starts(editor, cx); editor.toggle_comments(&Default::default(), cx); - vim.restore_selection_cursors(editor, cx, &mut original_positions); + 
vim.restore_selection_cursors(editor, cx, original_positions); }); }); if self.mode.is_visual() { @@ -467,7 +467,7 @@ impl Vim { &self, editor: &mut Editor, cx: &mut ViewContext, - positions: &mut HashMap, + mut positions: HashMap, ) { editor.change_selections(Some(Autoscroll::fit()), cx, |s| { s.move_with(|map, selection| { diff --git a/crates/vim/src/normal/yank.rs b/crates/vim/src/normal/yank.rs index 8271aa6cabc163..c176cd6ca9cf3d 100644 --- a/crates/vim/src/normal/yank.rs +++ b/crates/vim/src/normal/yank.rs @@ -1,4 +1,4 @@ -use std::time::Duration; +use std::{ops::Range, time::Duration}; use crate::{ motion::Motion, @@ -73,7 +73,18 @@ impl Vim { linewise: bool, cx: &mut ViewContext, ) { - self.copy_selections_content_internal(editor, linewise, true, cx); + self.copy_ranges( + editor, + linewise, + true, + editor + .selections + .all_adjusted(cx) + .iter() + .map(|s| s.range()) + .collect(), + cx, + ) } pub fn copy_selections_content( @@ -82,17 +93,28 @@ impl Vim { linewise: bool, cx: &mut ViewContext, ) { - self.copy_selections_content_internal(editor, linewise, false, cx); + self.copy_ranges( + editor, + linewise, + false, + editor + .selections + .all_adjusted(cx) + .iter() + .map(|s| s.range()) + .collect(), + cx, + ) } - fn copy_selections_content_internal( + pub(crate) fn copy_ranges( &mut self, editor: &mut Editor, linewise: bool, is_yank: bool, + selections: Vec>, cx: &mut ViewContext, ) { - let selections = editor.selections.all_adjusted(cx); let buffer = editor.buffer().read(cx).snapshot(cx); let mut text = String::new(); let mut clipboard_selections = Vec::with_capacity(selections.len()); diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 1503eaac1b6b43..72474d3ae40781 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -63,12 +63,7 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.record_current_action(cx); vim.visual_delete(true, cx); }); - Vim::action(editor, cx, |vim, _: &VisualYank, cx| { 
- vim.visual_yank(false, cx) - }); - Vim::action(editor, cx, |vim, _: &VisualYankLine, cx| { - vim.visual_yank(true, cx) - }); + Vim::action(editor, cx, |vim, _: &VisualYank, cx| vim.visual_yank(cx)); Vim::action(editor, cx, Vim::select_next); Vim::action(editor, cx, Vim::select_previous); @@ -483,11 +478,10 @@ impl Vim { self.switch_mode(Mode::Normal, true, cx); } - pub fn visual_yank(&mut self, line_mode: bool, cx: &mut ViewContext) { + pub fn visual_yank(&mut self, cx: &mut ViewContext) { self.store_visual_marks(cx); self.update_editor(cx, |vim, editor, cx| { - let line_mode = line_mode || editor.selections.line_mode; - editor.selections.line_mode = line_mode; + let line_mode = editor.selections.line_mode; vim.yank_selections_content(editor, line_mode, cx); editor.change_selections(None, cx, |s| { s.move_with(|map, selection| { @@ -657,7 +651,7 @@ impl Vim { self.stop_recording(cx); self.visual_delete(false, cx) } - Some(Operator::Yank) => self.visual_yank(false, cx), + Some(Operator::Yank) => self.visual_yank(cx), _ => {} // Ignoring other operators } } From caaa9a00a987855d6f64e6ef88f70cfd2499f848 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 27 Sep 2024 13:30:25 -0400 Subject: [PATCH 376/762] Remove Qwen2 model (#18444) Removed deprecated Qwen2 7B Instruct model from zed.dev provider (staff only). 
Release Notes: - N/A --- crates/collab/k8s/collab.template.yml | 12 --- crates/collab/src/lib.rs | 4 - crates/collab/src/llm.rs | 36 -------- crates/collab/src/llm/authorization.rs | 2 - crates/collab/src/llm/db/seed.rs | 9 -- .../collab/src/llm/db/tests/provider_tests.rs | 1 - crates/collab/src/tests/test_server.rs | 2 - .../language_model/src/model/cloud_model.rs | 27 ------ crates/language_model/src/provider/cloud.rs | 84 +------------------ crates/rpc/src/llm.rs | 1 - 10 files changed, 2 insertions(+), 176 deletions(-) diff --git a/crates/collab/k8s/collab.template.yml b/crates/collab/k8s/collab.template.yml index 7ddb871503cccb..7d4ea6eb9a3cb2 100644 --- a/crates/collab/k8s/collab.template.yml +++ b/crates/collab/k8s/collab.template.yml @@ -149,18 +149,6 @@ spec: secretKeyRef: name: google-ai key: api_key - - name: RUNPOD_API_KEY - valueFrom: - secretKeyRef: - name: runpod - key: api_key - optional: true - - name: RUNPOD_API_SUMMARY_URL - valueFrom: - secretKeyRef: - name: runpod - key: summary - optional: true - name: BLOB_STORE_ACCESS_KEY valueFrom: secretKeyRef: diff --git a/crates/collab/src/lib.rs b/crates/collab/src/lib.rs index 81ff3ff21f6926..6c32023a97a287 100644 --- a/crates/collab/src/lib.rs +++ b/crates/collab/src/lib.rs @@ -170,8 +170,6 @@ pub struct Config { pub anthropic_api_key: Option>, pub anthropic_staff_api_key: Option>, pub llm_closed_beta_model_name: Option>, - pub runpod_api_key: Option>, - pub runpod_api_summary_url: Option>, pub zed_client_checksum_seed: Option, pub slack_panics_webhook: Option, pub auto_join_channel_id: Option, @@ -235,8 +233,6 @@ impl Config { stripe_api_key: None, stripe_price_id: None, supermaven_admin_api_key: None, - runpod_api_key: None, - runpod_api_summary_url: None, user_backfiller_github_access_token: None, } } diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index 53f0bfdfd0130a..14f10342a78dd7 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -400,42 +400,6 @@ async fn 
perform_completion( }) .boxed() } - LanguageModelProvider::Zed => { - let api_key = state - .config - .runpod_api_key - .as_ref() - .context("no Qwen2-7B API key configured on the server")?; - let api_url = state - .config - .runpod_api_summary_url - .as_ref() - .context("no Qwen2-7B URL configured on the server")?; - let chunks = open_ai::stream_completion( - &state.http_client, - api_url, - api_key, - serde_json::from_str(params.provider_request.get())?, - None, - ) - .await?; - - chunks - .map(|event| { - event.map(|chunk| { - let input_tokens = - chunk.usage.as_ref().map_or(0, |u| u.prompt_tokens) as usize; - let output_tokens = - chunk.usage.as_ref().map_or(0, |u| u.completion_tokens) as usize; - ( - serde_json::to_vec(&chunk).unwrap(), - input_tokens, - output_tokens, - ) - }) - }) - .boxed() - } }; Ok(Response::new(Body::wrap_stream(TokenCountingStream { diff --git a/crates/collab/src/llm/authorization.rs b/crates/collab/src/llm/authorization.rs index cc345579eca229..9f82af51c39b73 100644 --- a/crates/collab/src/llm/authorization.rs +++ b/crates/collab/src/llm/authorization.rs @@ -77,7 +77,6 @@ fn authorize_access_for_country( LanguageModelProvider::Anthropic => anthropic::is_supported_country(country_code), LanguageModelProvider::OpenAi => open_ai::is_supported_country(country_code), LanguageModelProvider::Google => google_ai::is_supported_country(country_code), - LanguageModelProvider::Zed => true, }; if !is_country_supported_by_provider { Err(Error::http( @@ -213,7 +212,6 @@ mod tests { (LanguageModelProvider::Anthropic, "T1"), // Tor (LanguageModelProvider::OpenAi, "T1"), // Tor (LanguageModelProvider::Google, "T1"), // Tor - (LanguageModelProvider::Zed, "T1"), // Tor ]; for (provider, country_code) in cases { diff --git a/crates/collab/src/llm/db/seed.rs b/crates/collab/src/llm/db/seed.rs index 24bc224227c8d2..55c6c30cd5d8bf 100644 --- a/crates/collab/src/llm/db/seed.rs +++ b/crates/collab/src/llm/db/seed.rs @@ -40,15 +40,6 @@ pub async fn 
seed_database(_config: &Config, db: &mut LlmDatabase, _force: bool) price_per_million_input_tokens: 25, // $0.25/MTok price_per_million_output_tokens: 125, // $1.25/MTok }, - ModelParams { - provider: LanguageModelProvider::Zed, - name: "Qwen/Qwen2-7B-Instruct".into(), - max_requests_per_minute: 5, - max_tokens_per_minute: 25_000, // These are arbitrary limits we've set to cap costs; we control this number - max_tokens_per_day: 300_000, - price_per_million_input_tokens: 25, - price_per_million_output_tokens: 125, - }, ]) .await } diff --git a/crates/collab/src/llm/db/tests/provider_tests.rs b/crates/collab/src/llm/db/tests/provider_tests.rs index ef0da1c373fca6..0bb55ee4b69a6c 100644 --- a/crates/collab/src/llm/db/tests/provider_tests.rs +++ b/crates/collab/src/llm/db/tests/provider_tests.rs @@ -26,7 +26,6 @@ async fn test_initialize_providers(db: &mut LlmDatabase) { LanguageModelProvider::Anthropic, LanguageModelProvider::Google, LanguageModelProvider::OpenAi, - LanguageModelProvider::Zed ] ) } diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 94c7d3907ff4ff..5ff4a720741bc8 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -679,8 +679,6 @@ impl TestServer { stripe_api_key: None, stripe_price_id: None, supermaven_admin_api_key: None, - runpod_api_key: None, - runpod_api_summary_url: None, user_backfiller_github_access_token: None, }, }) diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index 2ce48931f6d4db..9242f80e6e16c7 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -12,7 +12,6 @@ pub enum CloudModel { Anthropic(anthropic::Model), OpenAi(open_ai::Model), Google(google_ai::Model), - Zed(ZedModel), } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, EnumIter)] @@ -21,26 +20,6 @@ pub enum ZedModel { Qwen2_7bInstruct, } 
-impl ZedModel { - pub fn id(&self) -> &str { - match self { - ZedModel::Qwen2_7bInstruct => "Qwen/Qwen2-7B-Instruct", - } - } - - pub fn display_name(&self) -> &str { - match self { - ZedModel::Qwen2_7bInstruct => "Qwen2 7B Instruct", - } - } - - pub fn max_token_count(&self) -> usize { - match self { - ZedModel::Qwen2_7bInstruct => 28000, - } - } -} - impl Default for CloudModel { fn default() -> Self { Self::Anthropic(anthropic::Model::default()) @@ -53,7 +32,6 @@ impl CloudModel { Self::Anthropic(model) => model.id(), Self::OpenAi(model) => model.id(), Self::Google(model) => model.id(), - Self::Zed(model) => model.id(), } } @@ -62,7 +40,6 @@ impl CloudModel { Self::Anthropic(model) => model.display_name(), Self::OpenAi(model) => model.display_name(), Self::Google(model) => model.display_name(), - Self::Zed(model) => model.display_name(), } } @@ -78,7 +55,6 @@ impl CloudModel { Self::Anthropic(model) => model.max_token_count(), Self::OpenAi(model) => model.max_token_count(), Self::Google(model) => model.max_token_count(), - Self::Zed(model) => model.max_token_count(), } } @@ -115,9 +91,6 @@ impl CloudModel { LanguageModelAvailability::RequiresPlan(Plan::ZedPro) } }, - Self::Zed(model) => match model { - ZedModel::Qwen2_7bInstruct => LanguageModelAvailability::RequiresPlan(Plan::ZedPro), - }, } } } diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 3c407b77d929de..b81f6f9fba3363 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -3,7 +3,7 @@ use crate::provider::anthropic::map_to_language_model_completion_events; use crate::{ settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration, LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName, - LanguageModelProviderState, LanguageModelRequest, RateLimiter, ZedModel, + LanguageModelProviderState, LanguageModelRequest, RateLimiter, }; 
use anthropic::AnthropicError; use anyhow::{anyhow, Result}; @@ -219,9 +219,6 @@ impl LanguageModelProvider for CloudLanguageModelProvider { models.insert(model.id().to_string(), CloudModel::Google(model)); } } - for model in ZedModel::iter() { - models.insert(model.id().to_string(), CloudModel::Zed(model)); - } } else { models.insert( anthropic::Model::Claude3_5Sonnet.id().to_string(), @@ -472,7 +469,7 @@ impl LanguageModel for CloudLanguageModel { min_total_token: cache.min_total_token, }) } - CloudModel::OpenAi(_) | CloudModel::Google(_) | CloudModel::Zed(_) => None, + CloudModel::OpenAi(_) | CloudModel::Google(_) => None, } } @@ -502,9 +499,6 @@ impl LanguageModel for CloudLanguageModel { } .boxed() } - CloudModel::Zed(_) => { - count_open_ai_tokens(request, open_ai::Model::ThreePointFiveTurbo, cx) - } } } @@ -603,35 +597,6 @@ impl LanguageModel for CloudLanguageModel { } .boxed() } - CloudModel::Zed(model) => { - let client = self.client.clone(); - let mut request = request.into_open_ai(model.id().into(), None); - request.max_tokens = Some(4000); - let llm_api_token = self.llm_api_token.clone(); - let future = self.request_limiter.stream(async move { - let response = Self::perform_llm_completion( - client.clone(), - llm_api_token, - PerformCompletionParams { - provider: client::LanguageModelProvider::Zed, - model: request.model.clone(), - provider_request: RawValue::from_string(serde_json::to_string( - &request, - )?)?, - }, - None, - ) - .await?; - Ok(open_ai::extract_text_from_events(response_lines(response))) - }); - async move { - Ok(future - .await? - .map(|result| result.map(LanguageModelCompletionEvent::Text)) - .boxed()) - } - .boxed() - } } } @@ -735,51 +700,6 @@ impl LanguageModel for CloudLanguageModel { CloudModel::Google(_) => { future::ready(Err(anyhow!("tool use not implemented for Google AI"))).boxed() } - CloudModel::Zed(model) => { - // All Zed models are OpenAI-based at the time of writing. 
- let mut request = request.into_open_ai(model.id().into(), None); - request.tool_choice = Some(open_ai::ToolChoice::Other( - open_ai::ToolDefinition::Function { - function: open_ai::FunctionDefinition { - name: tool_name.clone(), - description: None, - parameters: None, - }, - }, - )); - request.tools = vec![open_ai::ToolDefinition::Function { - function: open_ai::FunctionDefinition { - name: tool_name.clone(), - description: Some(tool_description), - parameters: Some(input_schema), - }, - }]; - - self.request_limiter - .run(async move { - let response = Self::perform_llm_completion( - client.clone(), - llm_api_token, - PerformCompletionParams { - provider: client::LanguageModelProvider::Zed, - model: request.model.clone(), - provider_request: RawValue::from_string(serde_json::to_string( - &request, - )?)?, - }, - None, - ) - .await?; - - Ok(open_ai::extract_tool_args_from_events( - tool_name, - Box::pin(response_lines(response)), - ) - .await? - .boxed()) - }) - .boxed() - } } } } diff --git a/crates/rpc/src/llm.rs b/crates/rpc/src/llm.rs index 6cae54b3090d56..681f2d8db32d18 100644 --- a/crates/rpc/src/llm.rs +++ b/crates/rpc/src/llm.rs @@ -12,7 +12,6 @@ pub enum LanguageModelProvider { Anthropic, OpenAi, Google, - Zed, } #[derive(Debug, Serialize, Deserialize)] From c3075dfe9afc788a1a0a2b965f0eb4f9a4ba77b4 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 27 Sep 2024 11:14:28 -0700 Subject: [PATCH 377/762] Fix bugs in diff hunk highlighting (#18454) Fixes https://github.com/zed-industries/zed/issues/18405 In https://github.com/zed-industries/zed/pull/18313, we introduced a problem where git addition highlights might spuriously return when undoing certain changes. It turned out, there were already some cases where git hunk highlighting was incorrect when editing at the boundaries of expanded diff hunks. In this PR, I've introduced a test helper method for more rigorously (and readably) testing the editor's git state. 
You can assert about the entire state of an editor's diff decorations using a formatted diff: ```rust cx.assert_diff_hunks( r#" - use some::mod1; use some::mod2; const A: u32 = 42; - const B: u32 = 42; const C: u32 = 42; fn main() { - println!("hello"); + //println!("hello"); println!("world"); + // + // } fn another() { println!("another"); + println!("another"); } - fn another2() { println!("another2"); } "# .unindent(), ); ``` This will assert about the editor's actual row highlights, not just the editor's internal hunk-tracking state. I rewrote all of our editor diff tests to use these more high-level assertions, and it caught the new bug, as well as some pre-existing bugs in the highlighting of added content. The problem was how we *remove* highlighted rows. Previously, it relied on supplying exactly the same range as one that we had previously highlighted. I've added a `remove_highlighted_rows(ranges)` API, which is much simpler - it clears out any row ranges that intersect the given ranges (which is all that we need for the Git diff use case).
Release Notes: - N/A --- Cargo.lock | 1 + crates/assistant/src/inline_assistant.rs | 4 +- crates/editor/Cargo.toml | 5 +- crates/editor/src/editor.rs | 157 +- crates/editor/src/editor_tests.rs | 1682 +++++------------ crates/editor/src/hunk_diff.rs | 45 +- crates/editor/src/test.rs | 113 -- crates/editor/src/test/editor_test_context.rs | 129 +- crates/go_to_line/src/go_to_line.rs | 2 +- crates/outline/src/outline.rs | 8 +- 10 files changed, 710 insertions(+), 1436 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 85a62c9519e012..123141d188e0eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3729,6 +3729,7 @@ dependencies = [ "multi_buffer", "ordered-float 2.10.1", "parking_lot", + "pretty_assertions", "project", "rand 0.8.5", "release_channel", diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 9c117e66653e93..e2f2fa190d3977 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1142,7 +1142,7 @@ impl InlineAssistant { for row_range in inserted_row_ranges { editor.highlight_rows::( row_range, - Some(cx.theme().status().info_background), + cx.theme().status().info_background, false, cx, ); @@ -1209,7 +1209,7 @@ impl InlineAssistant { editor.set_show_inline_completions(Some(false), cx); editor.highlight_rows::( Anchor::min()..=Anchor::max(), - Some(cx.theme().status().deleted_background), + cx.theme().status().deleted_background, false, cx, ); diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index b6b22ef64d33f6..cfd9284f807650 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -24,7 +24,8 @@ test-support = [ "workspace/test-support", "tree-sitter-rust", "tree-sitter-typescript", - "tree-sitter-html" + "tree-sitter-html", + "unindent", ] [dependencies] @@ -54,6 +55,7 @@ markdown.workspace = true multi_buffer.workspace = true ordered-float.workspace = true parking_lot.workspace = true +pretty_assertions.workspace = true 
project.workspace = true rand.workspace = true rpc.workspace = true @@ -74,6 +76,7 @@ theme.workspace = true tree-sitter-html = { workspace = true, optional = true } tree-sitter-rust = { workspace = true, optional = true } tree-sitter-typescript = { workspace = true, optional = true } +unindent = { workspace = true, optional = true } ui.workspace = true url.workspace = true util.workspace = true diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index cfffa584b6c214..48785dbaa55cfc 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -822,7 +822,7 @@ impl SelectionHistory { struct RowHighlight { index: usize, range: RangeInclusive, - color: Option, + color: Hsla, should_autoscroll: bool, } @@ -11500,41 +11500,125 @@ impl Editor { } } - /// Adds or removes (on `None` color) a highlight for the rows corresponding to the anchor range given. - /// On matching anchor range, replaces the old highlight; does not clear the other existing highlights. - /// If multiple anchor ranges will produce highlights for the same row, the last range added will be used. + /// Adds a row highlight for the given range. If a row has multiple highlights, the + /// last highlight added will be used. 
pub fn highlight_rows( &mut self, - rows: RangeInclusive, - color: Option, + range: RangeInclusive, + color: Hsla, should_autoscroll: bool, cx: &mut ViewContext, ) { let snapshot = self.buffer().read(cx).snapshot(cx); let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); - let existing_highlight_index = row_highlights.binary_search_by(|highlight| { - highlight - .range - .start() - .cmp(rows.start(), &snapshot) - .then(highlight.range.end().cmp(rows.end(), &snapshot)) + let ix = row_highlights.binary_search_by(|highlight| { + Ordering::Equal + .then_with(|| highlight.range.start().cmp(&range.start(), &snapshot)) + .then_with(|| highlight.range.end().cmp(&range.end(), &snapshot)) }); - match (color, existing_highlight_index) { - (Some(_), Ok(ix)) | (_, Err(ix)) => row_highlights.insert( - ix, - RowHighlight { - index: post_inc(&mut self.highlight_order), - range: rows, - should_autoscroll, - color, - }, - ), - (None, Ok(i)) => { - row_highlights.remove(i); + + if let Err(mut ix) = ix { + let index = post_inc(&mut self.highlight_order); + + // If this range intersects with the preceding highlight, then merge it with + // the preceding highlight. Otherwise insert a new highlight. + let mut merged = false; + if ix > 0 { + let prev_highlight = &mut row_highlights[ix - 1]; + if prev_highlight + .range + .end() + .cmp(&range.start(), &snapshot) + .is_ge() + { + ix -= 1; + if prev_highlight + .range + .end() + .cmp(&range.end(), &snapshot) + .is_lt() + { + prev_highlight.range = *prev_highlight.range.start()..=*range.end(); + } + merged = true; + prev_highlight.index = index; + prev_highlight.color = color; + prev_highlight.should_autoscroll = should_autoscroll; + } + } + + if !merged { + row_highlights.insert( + ix, + RowHighlight { + range: range.clone(), + index, + color, + should_autoscroll, + }, + ); + } + + // If any of the following highlights intersect with this one, merge them. 
+ while let Some(next_highlight) = row_highlights.get(ix + 1) { + let highlight = &row_highlights[ix]; + if next_highlight + .range + .start() + .cmp(&highlight.range.end(), &snapshot) + .is_le() + { + if next_highlight + .range + .end() + .cmp(&highlight.range.end(), &snapshot) + .is_gt() + { + row_highlights[ix].range = + *highlight.range.start()..=*next_highlight.range.end(); + } + row_highlights.remove(ix + 1); + } else { + break; + } } } } + /// Remove any highlighted row ranges of the given type that intersect the + /// given ranges. + pub fn remove_highlighted_rows( + &mut self, + ranges_to_remove: Vec>, + cx: &mut ViewContext, + ) { + let snapshot = self.buffer().read(cx).snapshot(cx); + let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); + let mut ranges_to_remove = ranges_to_remove.iter().peekable(); + row_highlights.retain(|highlight| { + while let Some(range_to_remove) = ranges_to_remove.peek() { + match range_to_remove.end.cmp(&highlight.range.start(), &snapshot) { + Ordering::Less => { + ranges_to_remove.next(); + } + Ordering::Equal => { + return false; + } + Ordering::Greater => { + match range_to_remove.start.cmp(&highlight.range.end(), &snapshot) { + Ordering::Less | Ordering::Equal => { + return false; + } + Ordering::Greater => break, + } + } + } + } + + true + }) + } + /// Clear all anchor ranges for a certain highlight context type, so no corresponding rows will be highlighted. pub fn clear_row_highlights(&mut self) { self.highlighted_rows.remove(&TypeId::of::()); @@ -11543,13 +11627,12 @@ impl Editor { /// For a highlight given context type, gets all anchor ranges that will be used for row highlighting. pub fn highlighted_rows( &self, - ) -> Option, Option<&Hsla>)>> { - Some( - self.highlighted_rows - .get(&TypeId::of::())? 
- .iter() - .map(|highlight| (&highlight.range, highlight.color.as_ref())), - ) + ) -> impl '_ + Iterator, Hsla)> { + self.highlighted_rows + .get(&TypeId::of::()) + .map_or(&[] as &[_], |vec| vec.as_slice()) + .iter() + .map(|highlight| (highlight.range.clone(), highlight.color)) } /// Merges all anchor ranges for all context types ever set, picking the last highlight added in case of a row conflict. @@ -11574,10 +11657,7 @@ impl Editor { used_highlight_orders.entry(row).or_insert(highlight.index); if highlight.index >= *used_index { *used_index = highlight.index; - match highlight.color { - Some(hsla) => unique_rows.insert(DisplayRow(row), hsla), - None => unique_rows.remove(&DisplayRow(row)), - }; + unique_rows.insert(DisplayRow(row), highlight.color); } } unique_rows @@ -11593,10 +11673,11 @@ impl Editor { .values() .flat_map(|highlighted_rows| highlighted_rows.iter()) .filter_map(|highlight| { - if highlight.color.is_none() || !highlight.should_autoscroll { - return None; + if highlight.should_autoscroll { + Some(highlight.range.start().to_display_point(snapshot).row()) + } else { + None } - Some(highlight.range.start().to_display_point(snapshot).row()) }) .min() } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 31a69918026f72..b17d94a5eb0f03 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -2,9 +2,8 @@ use super::*; use crate::{ scroll::scroll_amount::ScrollAmount, test::{ - assert_text_with_selections, build_editor, editor_hunks, - editor_lsp_test_context::EditorLspTestContext, editor_test_context::EditorTestContext, - expanded_hunks, expanded_hunks_background_highlights, select_ranges, + assert_text_with_selections, build_editor, editor_lsp_test_context::EditorLspTestContext, + editor_test_context::EditorTestContext, select_ranges, }, JoinLines, }; @@ -11196,36 +11195,30 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test 
cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - let unexpanded_hunks = vec![ - ( - "use some::mod;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(0)..DisplayRow(1), - ), - ( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2), - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(4)..DisplayRow(5), - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(6)..DisplayRow(7), - ), - ]; + cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!(all_hunks, unexpanded_hunks); + editor.go_to_next_hunk(&GoToHunk, cx); + editor.toggle_hunk_diff(&ToggleHunkDiff, cx); }); + executor.run_until_parked(); + cx.assert_diff_hunks( + r#" + use some::modified; + + + fn main() { + - println!("hello"); + + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); cx.update_editor(|editor, cx| { - for _ in 0..4 { + for _ in 0..3 { editor.go_to_next_hunk(&GoToHunk, cx); editor.toggle_hunk_diff(&ToggleHunkDiff, cx); } @@ -11245,57 +11238,47 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ("use some::mod;\n".to_string(), DiffHunkStatus::Modified, DisplayRow(2)..DisplayRow(3)), - ("const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(6)..DisplayRow(6)), - (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(10)..DisplayRow(11)), - ("".to_string(), DiffHunkStatus::Added, DisplayRow(13)..DisplayRow(14)), - ], - "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ - (from modified and 
removed hunks)" - ); - assert_eq!( - all_hunks, all_expanded_hunks, - "Editor hunks should not change and all be expanded" - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(2)..=DisplayRow(2), DisplayRow(10)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(13)], - "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" - ); - }); + + cx.assert_diff_hunks( + r#" + - use some::mod; + + use some::modified; + + - const A: u32 = 42; + + fn main() { + - println!("hello"); + + println!("hello there"); + + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); cx.update_editor(|editor, cx| { editor.cancel(&Cancel, cx); - - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "After cancelling in editor, no git highlights should be left" - ); - assert_eq!( - all_expanded_hunks, - Vec::new(), - "After cancelling in editor, no hunks should be expanded" - ); - assert_eq!( - all_hunks, unexpanded_hunks, - "After cancelling in editor, regular hunks' coordinates should get back to normal" - ); }); + + cx.assert_diff_hunks( + r#" + use some::modified; + + + fn main() { + println!("hello there"); + + println!("around the"); + println!("world"); + } + "# + .unindent(), + ); } #[gpui::test] -async fn test_toggled_diff_base_change( +async fn test_diff_base_change_with_expanded_diff_hunks( executor: BackgroundExecutor, cx: &mut gpui::TestAppContext, ) { @@ -11339,115 +11322,78 @@ async fn test_toggled_diff_base_change( cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - 
DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(9)..DisplayRow(11) - ), - ] - ); - }); cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod2; + cx.assert_diff_hunks( + r#" + - use some::mod1; + use some::mod2; + + const A: u32 = 42; + - const B: u32 = 42; + const C: u32 = 42; + + fn main() { + - println!("hello"); + + //println!("hello"); + + println!("world"); + + // + + // + } + "# + .unindent(), + ); - const A: u32 = 42; - const C: u32 = 42; + cx.set_diff_base(Some("new diff base!")); + executor.run_until_parked(); + cx.assert_diff_hunks( + r#" + use some::mod2; - fn main(ˇ) { - //println!("hello"); + const A: u32 = 42; + const C: u32 = 42; - println!("world"); - // - // - } + fn main() { + //println!("hello"); + + println!("world"); + // + // + } "# .unindent(), ); cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ("use some::mod1;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(2)..DisplayRow(2)), - ("const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(7)..DisplayRow(7)), - (" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(12)..DisplayRow(13)), - ("".to_string(), DiffHunkStatus::Added, DisplayRow(16)..DisplayRow(18)), - ], - "After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \ - (from modified and removed hunks)" - ); - assert_eq!( - all_hunks, all_expanded_hunks, - "Editor hunks should not 
change and all be expanded" - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(12)..=DisplayRow(12), DisplayRow(16)..=DisplayRow(17)], - "After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks" - ); + editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); - - cx.set_diff_base(Some("new diff base!")); executor.run_until_parked(); - - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "After diff base is changed, old git highlights should be removed" - ); - assert_eq!( - all_expanded_hunks, - Vec::new(), - "After diff base is changed, old git hunk expansions should be removed" - ); - assert_eq!( - all_hunks, - vec![( - "new diff base!".to_string(), - DiffHunkStatus::Modified, - DisplayRow(0)..snapshot.display_snapshot.max_point().row() - )], - "After diff base is changed, hunks should update" - ); - }); + cx.assert_diff_hunks( + r#" + - new diff base! 
+ + use some::mod2; + + + + const A: u32 = 42; + + const C: u32 = 42; + + + + fn main() { + + //println!("hello"); + + + + println!("world"); + + // + + // + + } + "# + .unindent(), + ); } #[gpui::test] -async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) { +async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; @@ -11504,337 +11450,138 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(6)..DisplayRow(7) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(9)..DisplayRow(11) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(15)..DisplayRow(16) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(18)..DisplayRow(18) - ), - ] - ); - }); cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - «use some::mod2; - const A: u32 = 42; - const C: u32 = 42; + cx.assert_diff_hunks( + r#" + - use some::mod1; + use some::mod2; - fn main() { - //println!("hello"); + const A: u32 = 42; + - const B: u32 = 42; + const C: u32 = 42; - println!("world"); - // - //ˇ» - } + fn main() { + - println!("hello"); + + //println!("hello"); - fn another() { - println!("another"); - println!("another"); - } + println!("world"); + + // + + // + } - 
println!("another2"); - } + fn another() { + println!("another"); + + println!("another"); + } + + - fn another2() { + println!("another2"); + } "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(7)..DisplayRow(7) - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(12)..DisplayRow(13) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(16)..DisplayRow(18) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(23)..DisplayRow(24) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(28)..DisplayRow(28) - ), - ], - ); - assert_eq!(all_hunks, all_expanded_hunks); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(12)..=DisplayRow(12), - DisplayRow(16)..=DisplayRow(17), - DisplayRow(23)..=DisplayRow(23) - ] - ); - }); + // Fold across some of the diff hunks. They should no longer appear expanded. 
cx.update_editor(|editor, cx| editor.fold_selected_ranges(&FoldSelectedRanges, cx)); cx.executor().run_until_parked(); - cx.assert_editor_state( - &r#" - «use some::mod2; - const A: u32 = 42; - const C: u32 = 42; + // Hunks are not shown if their position is within a fold + cx.assert_diff_hunks( + r#" + use some::mod2; - fn main() { - //println!("hello"); + const A: u32 = 42; + const C: u32 = 42; - println!("world"); - // - //ˇ» - } + fn main() { + //println!("hello"); - fn another() { - println!("another"); - println!("another"); - } + println!("world"); + // + // + } - println!("another2"); - } + fn another() { + println!("another"); + + println!("another"); + } + + - fn another2() { + println!("another2"); + } "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(0)..DisplayRow(0) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(0)..DisplayRow(1) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(6)..DisplayRow(7) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(11)..DisplayRow(11) - ), - ], - "Hunk list should still return shifted folded hunks" - ); - assert_eq!( - all_expanded_hunks, - vec![ - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(6)..DisplayRow(7) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(11)..DisplayRow(11) - ), - ], - "Only non-folded hunks should be left expanded" - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - 
vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(6)..=DisplayRow(6)], - "Only one hunk is left not folded, its highlight should be visible" - ); - }); cx.update_editor(|editor, cx| { editor.select_all(&SelectAll, cx); editor.unfold_lines(&UnfoldLines, cx); }); cx.executor().run_until_parked(); - cx.assert_editor_state( - &r#" - «use some::mod2; - const A: u32 = 42; - const C: u32 = 42; + // The deletions reappear when unfolding. + cx.assert_diff_hunks( + r#" + - use some::mod1; + use some::mod2; + + const A: u32 = 42; + - const B: u32 = 42; + const C: u32 = 42; + + fn main() { + - println!("hello"); + + //println!("hello"); + + println!("world"); + + // + + // + } + + fn another() { + println!("another"); + + println!("another"); + } + + - fn another2() { + println!("another2"); + } + "# + .unindent(), + ); +} - fn main() { - //println!("hello"); +#[gpui::test] +async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); - println!("world"); - // - // - } + let file_1_old = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj"; + let file_1_new = "aaa\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj"; + let file_2_old = "lll\nmmm\nnnn\nooo\nppp\nqqq\nrrr\nsss\nttt\nuuu"; + let file_2_new = "lll\nmmm\nNNN\nooo\nppp\nqqq\nrrr\nsss\nttt\nuuu"; + let file_3_old = "111\n222\n333\n444\n555\n777\n888\n999\n000\n!!!"; + let file_3_new = "111\n222\n333\n444\n555\n666\n777\n888\n999\n000\n!!!"; - fn another() { - println!("another"); - println!("another"); - } - - println!("another2"); - } - ˇ»"# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(2)..DisplayRow(2) - ), - ( - "const B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(7)..DisplayRow(7) - ), 
- ( - " println!(\"hello\");\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(12)..DisplayRow(13) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(16)..DisplayRow(18) - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(23)..DisplayRow(24) - ), - ( - "fn another2() {\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(28)..DisplayRow(28) - ), - ], - ); - assert_eq!(all_hunks, all_expanded_hunks); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(12)..=DisplayRow(12), - DisplayRow(16)..=DisplayRow(17), - DisplayRow(23)..=DisplayRow(23) - ], - "After unfolding, all hunk diffs should be visible again" - ); - }); -} - -#[gpui::test] -async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) { - init_test(cx, |_| {}); - - let cols = 4; - let rows = 10; - let sample_text_1 = sample_text(rows, cols, 'a'); - assert_eq!( - sample_text_1, - "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj" - ); - let modified_sample_text_1 = "aaaa\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj"; - let sample_text_2 = sample_text(rows, cols, 'l'); - assert_eq!( - sample_text_2, - "llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu" - ); - let modified_sample_text_2 = "llll\nmmmm\n1n1n1n1n1\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu"; - let sample_text_3 = sample_text(rows, cols, 'v'); - assert_eq!( - sample_text_3, - "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}" - ); - let modified_sample_text_3 = - "vvvv\nwwww\nxxxx\nyyyy\nzzzz\n@@@@\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}"; - let buffer_1 = cx.new_model(|cx| { - let mut buffer = Buffer::local(modified_sample_text_1.to_string(), cx); - buffer.set_diff_base(Some(sample_text_1.clone()), cx); - buffer - }); - let buffer_2 = cx.new_model(|cx| { - let mut buffer = Buffer::local(modified_sample_text_2.to_string(), cx); - buffer.set_diff_base(Some(sample_text_2.clone()), cx); - buffer - 
}); - let buffer_3 = cx.new_model(|cx| { - let mut buffer = Buffer::local(modified_sample_text_3.to_string(), cx); - buffer.set_diff_base(Some(sample_text_3.clone()), cx); - buffer - }); + let buffer_1 = cx.new_model(|cx| { + let mut buffer = Buffer::local(file_1_new.to_string(), cx); + buffer.set_diff_base(Some(file_1_old.into()), cx); + buffer + }); + let buffer_2 = cx.new_model(|cx| { + let mut buffer = Buffer::local(file_2_new.to_string(), cx); + buffer.set_diff_base(Some(file_2_old.into()), cx); + buffer + }); + let buffer_3 = cx.new_model(|cx| { + let mut buffer = Buffer::local(file_3_new.to_string(), cx); + buffer.set_diff_base(Some(file_3_old.into()), cx); + buffer + }); let multi_buffer = cx.new_model(|cx| { let mut multibuffer = MultiBuffer::new(ReadWrite); @@ -11850,7 +11597,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) primary: None, }, ExcerptRange { - context: Point::new(9, 0)..Point::new(10, 4), + context: Point::new(9, 0)..Point::new(10, 3), primary: None, }, ], @@ -11868,7 +11615,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) primary: None, }, ExcerptRange { - context: Point::new(9, 0)..Point::new(10, 4), + context: Point::new(9, 0)..Point::new(10, 3), primary: None, }, ], @@ -11886,7 +11633,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) primary: None, }, ExcerptRange { - context: Point::new(9, 0)..Point::new(10, 4), + context: Point::new(9, 0)..Point::new(10, 3), primary: None, }, ], @@ -11895,143 +11642,81 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) multibuffer }); - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/a", - json!({ - "main.rs": modified_sample_text_1, - "other.rs": modified_sample_text_2, - "lib.rs": modified_sample_text_3, - }), - ) - .await; + let editor = cx.add_window(|cx| Editor::new(EditorMode::Full, multi_buffer, None, true, cx)); + let mut cx = 
EditorTestContext::for_editor(editor, cx).await; + cx.run_until_parked(); - let project = Project::test(fs, ["/a".as_ref()], cx).await; - let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); - let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); - let multi_buffer_editor = cx.new_view(|cx| { - Editor::new( - EditorMode::Full, - multi_buffer, - Some(project.clone()), - true, - cx, - ) - }); - cx.executor().run_until_parked(); + cx.assert_editor_state( + &" + ˇaaa + ccc + ddd - let expected_all_hunks = vec![ - ( - "bbbb\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(4)..DisplayRow(4), - ), - ( - "nnnn\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(21)..DisplayRow(22), - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(41)..DisplayRow(42), - ), - ]; - let expected_all_hunks_shifted = vec![ - ( - "bbbb\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(6)..DisplayRow(6), - ), - ( - "nnnn\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(25)..DisplayRow(26), - ), - ( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(46)..DisplayRow(47), - ), - ]; + ggg + hhh - multi_buffer_editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!(all_hunks, expected_all_hunks); - assert_eq!(all_expanded_hunks, Vec::new()); - }); - multi_buffer_editor.update(cx, |editor, cx| { + lll + mmm + NNN + + qqq + rrr + + uuu + 111 + 222 + 333 + + 666 + 777 + + 000 + !!!" 
+ .unindent(), + ); + + cx.update_editor(|editor, cx| { editor.select_all(&SelectAll, cx); editor.toggle_hunk_diff(&ToggleHunkDiff, cx); }); cx.executor().run_until_parked(); - multi_buffer_editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(25)..=DisplayRow(25), - DisplayRow(46)..=DisplayRow(46) - ], - ); - assert_eq!(all_hunks, expected_all_hunks_shifted); - assert_eq!(all_hunks, all_expanded_hunks); - }); - multi_buffer_editor.update(cx, |editor, cx| { - editor.toggle_hunk_diff(&ToggleHunkDiff, cx); - }); - cx.executor().run_until_parked(); - multi_buffer_editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!(all_hunks, expected_all_hunks); - assert_eq!(all_expanded_hunks, Vec::new()); - }); + cx.assert_diff_hunks( + " + aaa + - bbb + ccc + ddd - multi_buffer_editor.update(cx, |editor, cx| { - editor.toggle_hunk_diff(&ToggleHunkDiff, cx); - }); - cx.executor().run_until_parked(); - multi_buffer_editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![ - DisplayRow(25)..=DisplayRow(25), - DisplayRow(46)..=DisplayRow(46) - ], - ); - assert_eq!(all_hunks, expected_all_hunks_shifted); - assert_eq!(all_hunks, all_expanded_hunks); - }); + ggg + hhh - multi_buffer_editor.update(cx, |editor, cx| { - editor.toggle_hunk_diff(&ToggleHunkDiff, cx); - }); - cx.executor().run_until_parked(); - multi_buffer_editor.update(cx, 
|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!(all_hunks, expected_all_hunks); - assert_eq!(all_expanded_hunks, Vec::new()); - }); + + lll + mmm + - nnn + + NNN + + qqq + rrr + + uuu + 111 + 222 + 333 + + + 666 + 777 + + 000 + !!!" + .unindent(), + ); } #[gpui::test] -async fn test_edits_around_toggled_additions( +async fn test_edits_around_expanded_insertion_hunks( executor: BackgroundExecutor, cx: &mut gpui::TestAppContext, ) { @@ -12074,71 +11759,21 @@ async fn test_edits_around_toggled_additions( cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(4)..DisplayRow(7) - )] - ); - }); + cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - ˇ - - fn main() { - println!("hello"); - println!("world"); - } - "# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(8) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(7)] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - - cx.update_editor(|editor, cx| editor.handle_input("const D: u32 = 42;\n", cx)); - executor.run_until_parked(); - 
cx.assert_editor_state( - &r#" + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - const D: u32 = 42; - ˇ + + const B: u32 = 42; + + const C: u32 = 42; + + fn main() { println!("hello"); @@ -12148,134 +11783,20 @@ async fn test_edits_around_toggled_additions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(9) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(7)], - "Edited hunk should have one more line added" - ); - assert_eq!( - all_hunks, all_expanded_hunks, - "Expanded hunk should also grow with the addition" - ); - }); - cx.update_editor(|editor, cx| editor.handle_input("const E: u32 = 42;\n", cx)); + cx.update_editor(|editor, cx| editor.handle_input("const D: u32 = 42;\n", cx)); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - const D: u32 = 42; - const E: u32 = 42; - ˇ - - fn main() { - println!("hello"); - println!("world"); - } - "# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(10) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(7)], - "Edited hunk should have one more line added" - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - - cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - 
editor.delete_line(&DeleteLine, cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - const D: u32 = 42; - ˇ - - fn main() { - println!("hello"); - - println!("world"); - } - "# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(5)..DisplayRow(9) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(5)..=DisplayRow(7)], - "Deleting a line should shrint the hunk" - ); - assert_eq!( - all_hunks, all_expanded_hunks, - "Expanded hunk should also shrink with the addition" - ); - }); - - cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - editor.delete_line(&DeleteLine, cx); - editor.move_up(&MoveUp, cx); - editor.delete_line(&DeleteLine, cx); - editor.move_up(&MoveUp, cx); - editor.delete_line(&DeleteLine, cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - ˇ + + const B: u32 = 42; + + const C: u32 = 42; + + const D: u32 = 42; + + fn main() { println!("hello"); @@ -12285,148 +11806,21 @@ async fn test_edits_around_toggled_additions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "".to_string(), - DiffHunkStatus::Added, - DisplayRow(6)..DisplayRow(7) - )] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(6)..=DisplayRow(6)] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - cx.update_editor(|editor, 
cx| { - editor.select_up_by_lines(&SelectUpByLines { lines: 5 }, cx); - editor.delete_line(&DeleteLine, cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" - ˇ - - fn main() { - println!("hello"); - - println!("world"); - } - "# - .unindent(), - ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![ - ( - "use some::mod1;\nuse some::mod2;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(0)..DisplayRow(0) - ), - ( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - ) - ] - ); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "Should close all stale expanded addition hunks" - ); - assert_eq!( - all_expanded_hunks, - vec![( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - )], - "Should open hunks that were adjacent to the stale addition one" - ); - }); -} - -#[gpui::test] -async fn test_edits_around_toggled_deletions( - executor: BackgroundExecutor, - cx: &mut gpui::TestAppContext, -) { - init_test(cx, |_| {}); - - let mut cx = EditorTestContext::new(cx).await; - - let diff_base = r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 42; - - - fn main() { - println!("hello"); - - println!("world"); - } - "# - .unindent(); + cx.update_editor(|editor, cx| editor.handle_input("const E: u32 = 42;\n", cx)); executor.run_until_parked(); - cx.set_state( - &r#" - use some::mod1; - use some::mod2; - - ˇconst B: u32 = 42; - const C: u32 = 42; - - - fn main() { - println!("hello"); - - println!("world"); - } - "# - .unindent(), - ); - cx.set_diff_base(Some(&diff_base)); - executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = 
editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(3)..DisplayRow(3) - )] - ); - }); - cx.update_editor(|editor, cx| { - editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" + cx.assert_diff_hunks( + r#" use some::mod1; - use some::mod2; - - ˇconst B: u32 = 42; - const C: u32 = 42; + use some::mod2; + const A: u32 = 42; + + const B: u32 = 42; + + const C: u32 = 42; + + const D: u32 = 42; + + const E: u32 = 42; + + fn main() { println!("hello"); @@ -12436,33 +11830,23 @@ async fn test_edits_around_toggled_deletions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(5)..DisplayRow(5) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { + editor.move_up(&MoveUp, cx); editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" + + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; - ˇconst C: u32 = 42; - + const A: u32 = 42; + + const B: u32 = 42; + + const C: u32 = 42; + + const D: u32 = 42; + + fn main() { println!("hello"); @@ -12472,27 +11856,13 @@ async fn test_edits_around_toggled_deletions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "Deleted hunks do not highlight current editor's background" - ); - assert_eq!( 
- all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(6)..DisplayRow(6) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { + editor.move_up(&MoveUp, cx); + editor.delete_line(&DeleteLine, cx); + editor.move_up(&MoveUp, cx); + editor.delete_line(&DeleteLine, cx); + editor.move_up(&MoveUp, cx); editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); @@ -12501,6 +11871,7 @@ async fn test_edits_around_toggled_deletions( use some::mod1; use some::mod2; + const A: u32 = 42; ˇ fn main() { @@ -12511,33 +11882,15 @@ async fn test_edits_around_toggled_deletions( "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new()); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), - DiffHunkStatus::Removed, - DisplayRow(7)..DisplayRow(7) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - cx.update_editor(|editor, cx| { - editor.handle_input("replacement", cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; - replacementˇ + const A: u32 = 42; + + fn main() { println!("hello"); @@ -12546,29 +11899,29 @@ async fn test_edits_around_toggled_deletions( "# .unindent(), ); + cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(8)..DisplayRow(9) - )] - ); - assert_eq!( - 
expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(8)..=DisplayRow(8)], - "Modified expanded hunks should display additions and highlight their background" - ); - assert_eq!(all_hunks, all_expanded_hunks); + editor.select_up_by_lines(&SelectUpByLines { lines: 5 }, cx); + editor.delete_line(&DeleteLine, cx); }); + executor.run_until_parked(); + cx.assert_diff_hunks( + r#" + + - const A: u32 = 42; + + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); } #[gpui::test] -async fn test_edits_around_toggled_modifications( +async fn test_edits_around_expanded_deletion_hunks( executor: BackgroundExecutor, cx: &mut gpui::TestAppContext, ) { @@ -12583,14 +11936,14 @@ async fn test_edits_around_toggled_modifications( const A: u32 = 42; const B: u32 = 42; const C: u32 = 42; - const D: u32 = 42; fn main() { println!("hello"); println!("world"); - }"# + } + "# .unindent(); executor.run_until_parked(); cx.set_state( @@ -12598,298 +11951,165 @@ async fn test_edits_around_toggled_modifications( use some::mod1; use some::mod2; - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 43ˇ - const D: u32 = 42; + ˇconst B: u32 = 42; + const C: u32 = 42; fn main() { println!("hello"); println!("world"); - }"# + } + "# .unindent(), ); cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(5)..DisplayRow(6) - )] - ); - }); + cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" + + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; - const A: u32 = 42; + - const A: u32 = 42; const B: u32 = 42; - const C: u32 = 43ˇ - const D: u32 = 42; + const C: u32 = 42; fn main() { println!("hello"); 
println!("world"); - }"# + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(7)], - ); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(8) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { - editor.handle_input("\nnew_line\n", cx); + editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); cx.assert_editor_state( &r#" - use some::mod1; - use some::mod2; + use some::mod1; + use some::mod2; - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 43 - new_line - ˇ - const D: u32 = 42; + ˇconst C: u32 = 42; - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(7)], - "Modified hunk should grow highlighted lines on more text additions" - ); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(10) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - - cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - editor.move_up(&MoveUp, cx); - editor.move_up(&MoveUp, cx); - editor.delete_line(&DeleteLine, cx); - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - - const A: u32 = 42; - ˇconst C: u32 = 43 - new_line + cx.assert_diff_hunks( + r#" + use 
some::mod1; + use some::mod2; - const D: u32 = 42; + - const A: u32 = 42; + - const B: u32 = 42; + const C: u32 = 42; - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(9)], - ); - assert_eq!( - all_hunks, - vec![( - "const B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(10) - )], - "Modified hunk should grow deleted lines on text deletions above" - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - editor.handle_input("v", cx); + editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); cx.assert_editor_state( &r#" - use some::mod1; - use some::mod2; + use some::mod1; + use some::mod2; + + ˇ - vˇconst A: u32 = 42; - const C: u32 = 43 - new_line + fn main() { + println!("hello"); + + println!("world"); + } + "# + .unindent(), + ); + cx.assert_diff_hunks( + r#" + use some::mod1; + use some::mod2; - const D: u32 = 42; + - const A: u32 = 42; + - const B: u32 = 42; + - const C: u32 = 42; - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(10)], - "Modified hunk should grow deleted lines on text modifications above" - ); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 
42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(11) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { - editor.move_down(&MoveDown, cx); - editor.move_down(&MoveDown, cx); - editor.delete_line(&DeleteLine, cx) + editor.handle_input("replacement", cx); }); executor.run_until_parked(); cx.assert_editor_state( &r#" - use some::mod1; - use some::mod2; - - vconst A: u32 = 42; - const C: u32 = 43 - ˇ - const D: u32 = 42; + use some::mod1; + use some::mod2; + replacementˇ - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(9)], - "Modified hunk should grow shrink lines on modification lines removal" - ); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(10) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); - - cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); - editor.move_up(&MoveUp, cx); - editor.select_down_by_lines(&SelectDownByLines { lines: 4 }, cx); - editor.delete_line(&DeleteLine, cx) - }); - executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; + cx.assert_diff_hunks( + r#" + use some::mod1; + use some::mod2; - ˇ + - const A: u32 = 42; + - const B: u32 = 42; + - const C: u32 = 42; + - + + replacement - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + } + "# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = 
editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - expanded_hunks_background_highlights(editor, cx), - Vec::new(), - "Modified hunk should turn into a removed one on all modified lines removal" - ); - assert_eq!( - all_hunks, - vec![( - "const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\nconst D: u32 = 42;\n" - .to_string(), - DiffHunkStatus::Removed, - DisplayRow(8)..DisplayRow(8) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); } #[gpui::test] -async fn test_multiple_expanded_hunks_merge( +async fn test_edit_after_expanded_modification_hunk( executor: BackgroundExecutor, cx: &mut gpui::TestAppContext, ) { @@ -12913,7 +12133,7 @@ async fn test_multiple_expanded_hunks_merge( println!("world"); }"# .unindent(); - executor.run_until_parked(); + cx.set_state( &r#" use some::mod1; @@ -12935,30 +12155,20 @@ async fn test_multiple_expanded_hunks_merge( cx.set_diff_base(Some(&diff_base)); executor.run_until_parked(); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(5)..DisplayRow(6) - )] - ); - }); cx.update_editor(|editor, cx| { editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" + + cx.assert_diff_hunks( + r#" use some::mod1; use some::mod2; const A: u32 = 42; const B: u32 = 42; - const C: u32 = 43ˇ + - const C: u32 = 42; + + const C: u32 = 43 const D: u32 = 42; @@ -12969,47 +12179,31 @@ async fn test_multiple_expanded_hunks_merge( }"# .unindent(), ); - cx.update_editor(|editor, cx| { - let snapshot = editor.snapshot(cx); - let all_hunks = editor_hunks(editor, &snapshot, cx); - let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx); - assert_eq!( - 
expanded_hunks_background_highlights(editor, cx), - vec![DisplayRow(7)..=DisplayRow(7)], - ); - assert_eq!( - all_hunks, - vec![( - "const C: u32 = 42;\n".to_string(), - DiffHunkStatus::Modified, - DisplayRow(7)..DisplayRow(8) - )] - ); - assert_eq!(all_hunks, all_expanded_hunks); - }); cx.update_editor(|editor, cx| { editor.handle_input("\nnew_line\n", cx); }); executor.run_until_parked(); - cx.assert_editor_state( - &r#" - use some::mod1; - use some::mod2; - const A: u32 = 42; - const B: u32 = 42; - const C: u32 = 43 - new_line - ˇ - const D: u32 = 42; + cx.assert_diff_hunks( + r#" + use some::mod1; + use some::mod2; + + const A: u32 = 42; + const B: u32 = 42; + - const C: u32 = 42; + + const C: u32 = 43 + + new_line + + + const D: u32 = 42; - fn main() { - println!("hello"); + fn main() { + println!("hello"); - println!("world"); - }"# + println!("world"); + }"# .unindent(), ); } diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 4fa1f10a8a17c9..e819032471f442 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -19,8 +19,8 @@ use util::RangeExt; use crate::{ editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, - Editor, EditorElement, EditorSnapshot, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, - RangeToAnchorExt, RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, + Editor, EditorElement, EditorSnapshot, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, + RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; #[derive(Debug, Clone)] @@ -219,14 +219,7 @@ impl Editor { }); } - for removed_rows in highlights_to_remove { - editor.highlight_rows::( - to_inclusive_row_range(removed_rows, &snapshot), - None, - false, - cx, - ); - } + editor.remove_highlighted_rows::(highlights_to_remove, cx); editor.remove_blocks(blocks_to_remove, None, cx); for hunk in 
hunks_to_expand { editor.expand_diff_hunk(None, &hunk, cx); @@ -306,7 +299,7 @@ impl Editor { DiffHunkStatus::Added => { self.highlight_rows::( to_inclusive_row_range(hunk_start..hunk_end, &snapshot), - Some(added_hunk_color(cx)), + added_hunk_color(cx), false, cx, ); @@ -315,7 +308,7 @@ impl Editor { DiffHunkStatus::Modified => { self.highlight_rows::( to_inclusive_row_range(hunk_start..hunk_end, &snapshot), - Some(added_hunk_color(cx)), + added_hunk_color(cx), false, cx, ); @@ -850,14 +843,7 @@ impl Editor { retain }); - for removed_rows in highlights_to_remove { - editor.highlight_rows::( - to_inclusive_row_range(removed_rows, &snapshot), - None, - false, - cx, - ); - } + editor.remove_highlighted_rows::(highlights_to_remove, cx); editor.remove_blocks(blocks_to_remove, None, cx); if let Some(diff_base_buffer) = &diff_base_buffer { @@ -978,7 +964,7 @@ fn editor_with_deleted_text( editor.set_show_inline_completions(Some(false), cx); editor.highlight_rows::( Anchor::min()..=Anchor::max(), - Some(deleted_color), + deleted_color, false, cx, ); @@ -1060,15 +1046,16 @@ fn to_inclusive_row_range( row_range: Range, snapshot: &EditorSnapshot, ) -> RangeInclusive { - let mut display_row_range = - row_range.start.to_display_point(snapshot)..row_range.end.to_display_point(snapshot); - if display_row_range.end.row() > display_row_range.start.row() { - *display_row_range.end.row_mut() -= 1; + let mut end = row_range.end.to_point(&snapshot.buffer_snapshot); + if end.column == 0 && end.row > 0 { + end = Point::new( + end.row - 1, + snapshot + .buffer_snapshot + .line_len(MultiBufferRow(end.row - 1)), + ); } - let point_range = display_row_range.start.to_point(&snapshot.display_snapshot) - ..display_row_range.end.to_point(&snapshot.display_snapshot); - let new_range = point_range.to_anchors(&snapshot.buffer_snapshot); - new_range.start..=new_range.end + row_range.start..=snapshot.buffer_snapshot.anchor_after(end) } impl DisplayDiffHunk { diff --git a/crates/editor/src/test.rs 
b/crates/editor/src/test.rs index 50214cd723ee31..d04b266e61802b 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -88,116 +88,3 @@ pub(crate) fn build_editor_with_project( ) -> Editor { Editor::new(EditorMode::Full, buffer, Some(project), true, cx) } - -#[cfg(any(test, feature = "test-support"))] -pub fn editor_hunks( - editor: &Editor, - snapshot: &DisplaySnapshot, - cx: &mut ViewContext<'_, Editor>, -) -> Vec<( - String, - git::diff::DiffHunkStatus, - std::ops::Range, -)> { - use multi_buffer::MultiBufferRow; - use text::Point; - - use crate::hunk_status; - - snapshot - .buffer_snapshot - .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) - .map(|hunk| { - let display_range = Point::new(hunk.row_range.start.0, 0) - .to_display_point(snapshot) - .row() - ..Point::new(hunk.row_range.end.0, 0) - .to_display_point(snapshot) - .row(); - let (_, buffer, _) = editor - .buffer() - .read(cx) - .excerpt_containing(Point::new(hunk.row_range.start.0, 0), cx) - .expect("no excerpt for expanded buffer's hunk start"); - let diff_base = buffer - .read(cx) - .diff_base() - .expect("should have a diff base for expanded hunk") - .slice(hunk.diff_base_byte_range.clone()) - .to_string(); - (diff_base, hunk_status(&hunk), display_range) - }) - .collect() -} - -#[cfg(any(test, feature = "test-support"))] -pub fn expanded_hunks( - editor: &Editor, - snapshot: &DisplaySnapshot, - cx: &mut ViewContext<'_, Editor>, -) -> Vec<( - String, - git::diff::DiffHunkStatus, - std::ops::Range, -)> { - editor - .expanded_hunks - .hunks(false) - .map(|expanded_hunk| { - let hunk_display_range = expanded_hunk - .hunk_range - .start - .to_display_point(snapshot) - .row() - ..expanded_hunk - .hunk_range - .end - .to_display_point(snapshot) - .row(); - let (_, buffer, _) = editor - .buffer() - .read(cx) - .excerpt_containing(expanded_hunk.hunk_range.start, cx) - .expect("no excerpt for expanded buffer's hunk start"); - let diff_base = buffer - .read(cx) - 
.diff_base() - .expect("should have a diff base for expanded hunk") - .slice(expanded_hunk.diff_base_byte_range.clone()) - .to_string(); - (diff_base, expanded_hunk.status, hunk_display_range) - }) - .collect() -} - -#[cfg(any(test, feature = "test-support"))] -pub fn expanded_hunks_background_highlights( - editor: &mut Editor, - cx: &mut gpui::WindowContext, -) -> Vec> { - use crate::DisplayRow; - - let mut highlights = Vec::new(); - - let mut range_start = 0; - let mut previous_highlighted_row = None; - for (highlighted_row, _) in editor.highlighted_display_rows(cx) { - match previous_highlighted_row { - Some(previous_row) => { - if previous_row + 1 != highlighted_row.0 { - highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row)); - range_start = highlighted_row.0; - } - } - None => { - range_start = highlighted_row.0; - } - } - previous_highlighted_row = Some(highlighted_row.0); - } - if let Some(previous_row) = previous_highlighted_row { - highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row)); - } - - highlights -} diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 3e4ef174d422ae..2ec4f4a3b7b7bc 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -1,17 +1,18 @@ use crate::{ - display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DisplayPoint, Editor, MultiBuffer, - RowExt, + display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DiffRowHighlight, DisplayPoint, + Editor, MultiBuffer, RowExt, }; use collections::BTreeMap; use futures::Future; +use git::diff::DiffHunkStatus; use gpui::{ AnyWindowHandle, AppContext, Keystroke, ModelContext, Pixels, Point, View, ViewContext, - VisualTestContext, + VisualTestContext, WindowHandle, }; use indoc::indoc; use itertools::Itertools; use language::{Buffer, BufferSnapshot, LanguageRegistry}; -use multi_buffer::ExcerptRange; +use multi_buffer::{ExcerptRange, ToPoint}; use 
parking_lot::RwLock; use project::{FakeFs, Project}; use std::{ @@ -71,6 +72,16 @@ impl EditorTestContext { } } + pub async fn for_editor(editor: WindowHandle, cx: &mut gpui::TestAppContext) -> Self { + let editor_view = editor.root_view(cx).unwrap(); + Self { + cx: VisualTestContext::from_window(*editor.deref(), cx), + window: editor.into(), + editor: editor_view, + assertion_cx: AssertionContextManager::new(), + } + } + pub fn new_multibuffer( cx: &mut gpui::TestAppContext, excerpts: [&str; COUNT], @@ -297,6 +308,76 @@ impl EditorTestContext { state_context } + #[track_caller] + pub fn assert_diff_hunks(&mut self, expected_diff: String) { + // Normalize the expected diff. If it has no diff markers, then insert blank markers + // before each line. Strip any whitespace-only lines. + let has_diff_markers = expected_diff + .lines() + .any(|line| line.starts_with("+") || line.starts_with("-")); + let expected_diff_text = expected_diff + .split('\n') + .map(|line| { + let trimmed = line.trim(); + if trimmed.is_empty() { + String::new() + } else if has_diff_markers { + line.to_string() + } else { + format!(" {line}") + } + }) + .join("\n"); + + // Read the actual diff from the editor's row highlights and block + // decorations. 
+ let actual_diff = self.editor.update(&mut self.cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + let text = editor.text(cx); + let insertions = editor + .highlighted_rows::() + .map(|(range, _)| { + range.start().to_point(&snapshot.buffer_snapshot).row + ..range.end().to_point(&snapshot.buffer_snapshot).row + 1 + }) + .collect::>(); + let deletions = editor + .expanded_hunks + .hunks + .iter() + .filter_map(|hunk| { + if hunk.blocks.is_empty() { + return None; + } + let row = hunk + .hunk_range + .start + .to_point(&snapshot.buffer_snapshot) + .row; + let (_, buffer, _) = editor + .buffer() + .read(cx) + .excerpt_containing(hunk.hunk_range.start, cx) + .expect("no excerpt for expanded buffer's hunk start"); + let deleted_text = buffer + .read(cx) + .diff_base() + .expect("should have a diff base for expanded hunk") + .slice(hunk.diff_base_byte_range.clone()) + .to_string(); + if let DiffHunkStatus::Modified | DiffHunkStatus::Removed = hunk.status { + Some((row, deleted_text)) + } else { + None + } + }) + .collect::>(); + format_diff(text, deletions, insertions) + }); + + pretty_assertions::assert_eq!(actual_diff, expected_diff_text, "unexpected diff state"); + } + /// Make an assertion about the editor's text and the ranges and directions /// of its selections using a string containing embedded range markers. 
/// @@ -401,6 +482,46 @@ impl EditorTestContext { } } +fn format_diff( + text: String, + actual_deletions: Vec<(u32, String)>, + actual_insertions: Vec>, +) -> String { + let mut diff = String::new(); + for (row, line) in text.split('\n').enumerate() { + let row = row as u32; + if row > 0 { + diff.push('\n'); + } + if let Some(text) = actual_deletions + .iter() + .find_map(|(deletion_row, deleted_text)| { + if *deletion_row == row { + Some(deleted_text) + } else { + None + } + }) + { + for line in text.lines() { + diff.push('-'); + if !line.is_empty() { + diff.push(' '); + diff.push_str(line); + } + diff.push('\n'); + } + } + let marker = if actual_insertions.iter().any(|range| range.contains(&row)) { + "+ " + } else { + " " + }; + diff.push_str(format!("{marker}{line}").trim_end()); + } + diff +} + impl Deref for EditorTestContext { type Target = gpui::VisualTestContext; diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 4f3e6194a022eb..fd631648c2c759 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -121,7 +121,7 @@ impl GoToLine { active_editor.clear_row_highlights::(); active_editor.highlight_rows::( anchor..=anchor, - Some(cx.theme().colors().editor_highlighted_line_background), + cx.theme().colors().editor_highlighted_line_background, true, cx, ); diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index cd641636349e39..520311b6f3c625 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -144,7 +144,7 @@ impl OutlineViewDelegate { active_editor.clear_row_highlights::(); active_editor.highlight_rows::( outline_item.range.start..=outline_item.range.end, - Some(cx.theme().colors().editor_highlighted_line_background), + cx.theme().colors().editor_highlighted_line_background, true, cx, ); @@ -240,10 +240,10 @@ impl PickerDelegate for OutlineViewDelegate { self.prev_scroll_position.take(); self.active_editor.update(cx, 
|active_editor, cx| { - if let Some(rows) = active_editor + let highlight = active_editor .highlighted_rows::() - .and_then(|highlights| highlights.into_iter().next().map(|(rows, _)| rows.clone())) - { + .next(); + if let Some((rows, _)) = highlight { active_editor.change_selections(Some(Autoscroll::center()), cx, |s| { s.select_ranges([*rows.start()..*rows.start()]) }); From d5f67406b0b7eb9ffd261b79467d17c1dc28a041 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 27 Sep 2024 22:42:04 +0300 Subject: [PATCH 378/762] Install cargo-edito without extra features (#18457) https://github.com/killercup/cargo-edit/pull/907 removed the feature from the crate Release Notes: - N/A --- .github/workflows/bump_patch_version.yml | 2 +- script/lib/bump-version.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index d05da31e6a0f67..b875ff7b221028 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -41,7 +41,7 @@ jobs: exit 1 ;; esac - which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl + which cargo-set-version > /dev/null || cargo install cargo-edit output=$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //') git commit -am "Bump to $output for @$GITHUB_ACTOR" --author "Zed Bot " git tag v${output}${tag_suffix} diff --git a/script/lib/bump-version.sh b/script/lib/bump-version.sh index 0e1dfa5131d6a7..ce955369505dbf 100755 --- a/script/lib/bump-version.sh +++ b/script/lib/bump-version.sh @@ -12,7 +12,7 @@ if [[ -n $(git status --short --untracked-files=no) ]]; then exit 1 fi -which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl +which cargo-set-version > /dev/null || cargo install cargo-edit which jq > /dev/null || brew install jq cargo set-version --package $package --bump $version_increment cargo check --quiet From 
1c5be9de4e8c17419480a922f33f513a2ff197de Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 27 Sep 2024 22:02:32 +0200 Subject: [PATCH 379/762] Capitalize tooltip labels on buffer search (#18458) For consistency, as this seems to be the pattern we're using overall for labels and buttons. --- Release Notes: - N/A --- crates/search/src/buffer_search.rs | 14 +++++++------- crates/search/src/search.rs | 6 +++--- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 6e660a963b987d..42b267c3c9563a 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -288,7 +288,7 @@ impl Render for BufferSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Toggle search selection", + "Toggle Search Selection", &ToggleSelection, &focus_handle, cx, @@ -308,7 +308,7 @@ impl Render for BufferSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Select all matches", + "Select All Matches", &SelectAllMatches, &focus_handle, cx, @@ -319,14 +319,14 @@ impl Render for BufferSearchBar { .child(render_nav_button( ui::IconName::ChevronLeft, self.active_match_index.is_some(), - "Select previous match", + "Select Previous Match", &SelectPrevMatch, focus_handle.clone(), )) .child(render_nav_button( ui::IconName::ChevronRight, self.active_match_index.is_some(), - "Select next match", + "Select Next Match", &SelectNextMatch, focus_handle.clone(), )) @@ -373,7 +373,7 @@ impl Render for BufferSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Replace next match", + "Replace Next Match", &ReplaceNext, &focus_handle, cx, @@ -390,7 +390,7 @@ impl Render for BufferSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Replace all matches", + "Replace All Matches", &ReplaceAll, 
&focus_handle, cx, @@ -442,7 +442,7 @@ impl Render for BufferSearchBar { div.child( IconButton::new(SharedString::from("Close"), IconName::Close) .tooltip(move |cx| { - Tooltip::for_action("Close search bar", &Dismiss, cx) + Tooltip::for_action("Close Search Bar", &Dismiss, cx) }) .on_click(cx.listener(|this, _: &ClickEvent, cx| { this.dismiss(&Dismiss, cx) diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index d13a12576b0f94..0ceb8e710b5f41 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -53,10 +53,10 @@ bitflags! { impl SearchOptions { pub fn label(&self) -> &'static str { match *self { - SearchOptions::WHOLE_WORD => "Match whole words", - SearchOptions::CASE_SENSITIVE => "Match case sensitively", + SearchOptions::WHOLE_WORD => "Match Whole Words", + SearchOptions::CASE_SENSITIVE => "Match Case Sensitively", SearchOptions::INCLUDE_IGNORED => "Also search files ignored by configuration", - SearchOptions::REGEX => "Use regular expressions", + SearchOptions::REGEX => "Use Regular Expressions", _ => panic!("{:?} is not a named SearchOption", self), } } From 689da9d0b1b8ff32ebecdd2dbe8d5c93b4c7a3bf Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 27 Sep 2024 13:13:55 -0700 Subject: [PATCH 380/762] Move git hunk controls to the left side (#18460) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ![Screenshot 2024-09-27 at 1 05 14 PM](https://github.com/user-attachments/assets/260a7d05-daa8-4a22-92bc-3b956035227f) Release Notes: - N/A --- crates/editor/src/hunk_diff.rs | 131 ++++++++++++++++----------------- 1 file changed, 64 insertions(+), 67 deletions(-) diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index e819032471f442..3e18b992c1dae8 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -513,49 +513,7 @@ impl Editor { }); } }), - ), - ) - .child( - h_flex() - .gap_2() - .pr_6() - .child({ - let focus = 
editor.focus_handle(cx); - PopoverMenu::new("hunk-controls-dropdown") - .trigger( - IconButton::new( - "toggle_editor_selections_icon", - IconName::EllipsisVertical, - ) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .style(ButtonStyle::Subtle) - .selected( - hunk_controls_menu_handle.is_deployed(), - ) - .when( - !hunk_controls_menu_handle.is_deployed(), - |this| { - this.tooltip(|cx| { - Tooltip::text("Hunk Controls", cx) - }) - }, - ), - ) - .anchor(AnchorCorner::TopRight) - .with_handle(hunk_controls_menu_handle) - .menu(move |cx| { - let focus = focus.clone(); - let menu = - ContextMenu::build(cx, move |menu, _| { - menu.context(focus.clone()).action( - "Discard All", - RevertFile.boxed_clone(), - ) - }); - Some(menu) - }) - }) + ) .child( IconButton::new("discard", IconName::RotateCcw) .shape(IconButtonShape::Square) @@ -601,31 +559,70 @@ impl Editor { } }), ) - .child( - IconButton::new("collapse", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Collapse Hunk", - &ToggleHunkDiff, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); + .child({ + let focus = editor.focus_handle(cx); + PopoverMenu::new("hunk-controls-dropdown") + .trigger( + IconButton::new( + "toggle_editor_selections_icon", + IconName::EllipsisVertical, + ) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .style(ButtonStyle::Subtle) + .selected( + hunk_controls_menu_handle.is_deployed(), + ) + .when( + !hunk_controls_menu_handle.is_deployed(), + |this| { + this.tooltip(|cx| { + Tooltip::text("Hunk Controls", cx) + }) + }, + ), + ) + .anchor(AnchorCorner::TopRight) + .with_handle(hunk_controls_menu_handle) + .menu(move |cx| { + let focus = focus.clone(); + let menu = + 
ContextMenu::build(cx, move |menu, _| { + menu.context(focus.clone()).action( + "Discard All", + RevertFile.boxed_clone(), + ) }); - } - }), - ), + Some(menu) + }) + }), + ) + .child( + h_flex().gap_2().pr_6().child( + IconButton::new("collapse", IconName::Close) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Collapse Hunk", + &ToggleHunkDiff, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ), ), ) .into_any_element() From 0daa070448d3a5078cd274ddbd5ec18e425c63d3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 27 Sep 2024 13:48:37 -0700 Subject: [PATCH 381/762] More git hunk highlighting fixes (#18459) Follow-up to https://github.com/zed-industries/zed/pull/18454 Release Notes: - N/A --- crates/assistant/src/inline_assistant.rs | 8 +-- crates/editor/src/editor.rs | 63 +++++++++---------- crates/editor/src/editor_tests.rs | 11 ++-- crates/editor/src/hunk_diff.rs | 29 ++------- crates/editor/src/test/editor_test_context.rs | 32 +++------- crates/go_to_line/src/go_to_line.rs | 22 ++++--- crates/outline/src/outline.rs | 4 +- 7 files changed, 69 insertions(+), 100 deletions(-) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index e2f2fa190d3977..fac70f233c6562 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1208,7 +1208,7 @@ impl InlineAssistant { editor.set_read_only(true); editor.set_show_inline_completions(Some(false), cx); editor.highlight_rows::( - Anchor::min()..=Anchor::max(), + Anchor::min()..Anchor::max(), cx.theme().status().deleted_background, false, cx, @@ -2557,7 +2557,7 @@ enum CodegenStatus { #[derive(Default)] struct Diff { deleted_row_ranges: 
Vec<(Anchor, RangeInclusive)>, - inserted_row_ranges: Vec>, + inserted_row_ranges: Vec>, } impl Diff { @@ -3103,7 +3103,7 @@ impl CodegenAlternative { new_end_row, new_snapshot.line_len(MultiBufferRow(new_end_row)), )); - self.diff.inserted_row_ranges.push(start..=end); + self.diff.inserted_row_ranges.push(start..end); new_row += lines; } } @@ -3181,7 +3181,7 @@ impl CodegenAlternative { new_end_row, new_snapshot.line_len(MultiBufferRow(new_end_row)), )); - inserted_row_ranges.push(start..=end); + inserted_row_ranges.push(start..end); new_row += line_count; } } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 48785dbaa55cfc..b604f388debd58 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -821,7 +821,7 @@ impl SelectionHistory { struct RowHighlight { index: usize, - range: RangeInclusive, + range: Range, color: Hsla, should_autoscroll: bool, } @@ -11502,9 +11502,11 @@ impl Editor { /// Adds a row highlight for the given range. If a row has multiple highlights, the /// last highlight added will be used. + /// + /// If the range ends at the beginning of a line, then that line will not be highlighted. 
pub fn highlight_rows( &mut self, - range: RangeInclusive, + range: Range, color: Hsla, should_autoscroll: bool, cx: &mut ViewContext, @@ -11513,8 +11515,8 @@ impl Editor { let row_highlights = self.highlighted_rows.entry(TypeId::of::()).or_default(); let ix = row_highlights.binary_search_by(|highlight| { Ordering::Equal - .then_with(|| highlight.range.start().cmp(&range.start(), &snapshot)) - .then_with(|| highlight.range.end().cmp(&range.end(), &snapshot)) + .then_with(|| highlight.range.start.cmp(&range.start, &snapshot)) + .then_with(|| highlight.range.end.cmp(&range.end, &snapshot)) }); if let Err(mut ix) = ix { @@ -11527,18 +11529,13 @@ impl Editor { let prev_highlight = &mut row_highlights[ix - 1]; if prev_highlight .range - .end() - .cmp(&range.start(), &snapshot) + .end + .cmp(&range.start, &snapshot) .is_ge() { ix -= 1; - if prev_highlight - .range - .end() - .cmp(&range.end(), &snapshot) - .is_lt() - { - prev_highlight.range = *prev_highlight.range.start()..=*range.end(); + if prev_highlight.range.end.cmp(&range.end, &snapshot).is_lt() { + prev_highlight.range.end = range.end; } merged = true; prev_highlight.index = index; @@ -11564,18 +11561,17 @@ impl Editor { let highlight = &row_highlights[ix]; if next_highlight .range - .start() - .cmp(&highlight.range.end(), &snapshot) + .start + .cmp(&highlight.range.end, &snapshot) .is_le() { if next_highlight .range - .end() - .cmp(&highlight.range.end(), &snapshot) + .end + .cmp(&highlight.range.end, &snapshot) .is_gt() { - row_highlights[ix].range = - *highlight.range.start()..=*next_highlight.range.end(); + row_highlights[ix].range.end = next_highlight.range.end; } row_highlights.remove(ix + 1); } else { @@ -11597,15 +11593,12 @@ impl Editor { let mut ranges_to_remove = ranges_to_remove.iter().peekable(); row_highlights.retain(|highlight| { while let Some(range_to_remove) = ranges_to_remove.peek() { - match range_to_remove.end.cmp(&highlight.range.start(), &snapshot) { - Ordering::Less => { + match 
range_to_remove.end.cmp(&highlight.range.start, &snapshot) { + Ordering::Less | Ordering::Equal => { ranges_to_remove.next(); } - Ordering::Equal => { - return false; - } Ordering::Greater => { - match range_to_remove.start.cmp(&highlight.range.end(), &snapshot) { + match range_to_remove.start.cmp(&highlight.range.end, &snapshot) { Ordering::Less | Ordering::Equal => { return false; } @@ -11625,9 +11618,7 @@ impl Editor { } /// For a highlight given context type, gets all anchor ranges that will be used for row highlighting. - pub fn highlighted_rows( - &self, - ) -> impl '_ + Iterator, Hsla)> { + pub fn highlighted_rows(&self) -> impl '_ + Iterator, Hsla)> { self.highlighted_rows .get(&TypeId::of::()) .map_or(&[] as &[_], |vec| vec.as_slice()) @@ -11650,9 +11641,17 @@ impl Editor { .fold( BTreeMap::::new(), |mut unique_rows, highlight| { - let start_row = highlight.range.start().to_display_point(&snapshot).row(); - let end_row = highlight.range.end().to_display_point(&snapshot).row(); - for row in start_row.0..=end_row.0 { + let start = highlight.range.start.to_display_point(&snapshot); + let end = highlight.range.end.to_display_point(&snapshot); + let start_row = start.row().0; + let end_row = if highlight.range.end.text_anchor != text::Anchor::MAX + && end.column() == 0 + { + end.row().0.saturating_sub(1) + } else { + end.row().0 + }; + for row in start_row..=end_row { let used_index = used_highlight_orders.entry(row).or_insert(highlight.index); if highlight.index >= *used_index { @@ -11674,7 +11673,7 @@ impl Editor { .flat_map(|highlighted_rows| highlighted_rows.iter()) .filter_map(|highlight| { if highlight.should_autoscroll { - Some(highlight.range.start().to_display_point(snapshot).row()) + Some(highlight.range.start.to_display_point(snapshot).row()) } else { None } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index b17d94a5eb0f03..249d0a474641d0 100644 --- a/crates/editor/src/editor_tests.rs +++ 
b/crates/editor/src/editor_tests.rs @@ -11832,7 +11832,6 @@ async fn test_edits_around_expanded_insertion_hunks( ); cx.update_editor(|editor, cx| { - editor.move_up(&MoveUp, cx); editor.delete_line(&DeleteLine, cx); }); executor.run_until_parked(); @@ -11846,7 +11845,7 @@ async fn test_edits_around_expanded_insertion_hunks( + const B: u32 = 42; + const C: u32 = 42; + const D: u32 = 42; - + + + const E: u32 = 42; fn main() { println!("hello"); @@ -11872,8 +11871,8 @@ async fn test_edits_around_expanded_insertion_hunks( use some::mod2; const A: u32 = 42; + const B: u32 = 42; ˇ - fn main() { println!("hello"); @@ -11889,8 +11888,8 @@ async fn test_edits_around_expanded_insertion_hunks( use some::mod2; const A: u32 = 42; + + const B: u32 = 42; - + fn main() { println!("hello"); @@ -11907,7 +11906,9 @@ async fn test_edits_around_expanded_insertion_hunks( executor.run_until_parked(); cx.assert_diff_hunks( r#" - + use some::mod1; + - use some::mod2; + - - const A: u32 = 42; fn main() { diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 3e18b992c1dae8..cf2a857b67d2ff 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -6,10 +6,7 @@ use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, MultiBufferSnapshot, ToPoint, }; -use std::{ - ops::{Range, RangeInclusive}, - sync::Arc, -}; +use std::{ops::Range, sync::Arc}; use ui::{ prelude::*, ActiveTheme, ContextMenu, IconButtonShape, InteractiveElement, IntoElement, ParentElement, PopoverMenu, Styled, Tooltip, ViewContext, VisualContext, @@ -19,7 +16,7 @@ use util::RangeExt; use crate::{ editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, - Editor, EditorElement, EditorSnapshot, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, + Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, 
GoToPrevHunk, RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; @@ -298,7 +295,7 @@ impl Editor { } DiffHunkStatus::Added => { self.highlight_rows::( - to_inclusive_row_range(hunk_start..hunk_end, &snapshot), + hunk_start..hunk_end, added_hunk_color(cx), false, cx, @@ -307,7 +304,7 @@ impl Editor { } DiffHunkStatus::Modified => { self.highlight_rows::( - to_inclusive_row_range(hunk_start..hunk_end, &snapshot), + hunk_start..hunk_end, added_hunk_color(cx), false, cx, @@ -960,7 +957,7 @@ fn editor_with_deleted_text( editor.set_read_only(true); editor.set_show_inline_completions(Some(false), cx); editor.highlight_rows::( - Anchor::min()..=Anchor::max(), + Anchor::min()..Anchor::max(), deleted_color, false, cx, @@ -1039,22 +1036,6 @@ fn buffer_diff_hunk( None } -fn to_inclusive_row_range( - row_range: Range, - snapshot: &EditorSnapshot, -) -> RangeInclusive { - let mut end = row_range.end.to_point(&snapshot.buffer_snapshot); - if end.column == 0 && end.row > 0 { - end = Point::new( - end.row - 1, - snapshot - .buffer_snapshot - .line_len(MultiBufferRow(end.row - 1)), - ); - } - row_range.start..=snapshot.buffer_snapshot.anchor_after(end) -} - impl DisplayDiffHunk { pub fn start_display_row(&self) -> DisplayRow { match self { diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 2ec4f4a3b7b7bc..7234d97c5b77e4 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -9,7 +9,6 @@ use gpui::{ AnyWindowHandle, AppContext, Keystroke, ModelContext, Pixels, Point, View, ViewContext, VisualTestContext, WindowHandle, }; -use indoc::indoc; use itertools::Itertools; use language::{Buffer, BufferSnapshot, LanguageRegistry}; use multi_buffer::{ExcerptRange, ToPoint}; @@ -337,8 +336,9 @@ impl EditorTestContext { let insertions = editor .highlighted_rows::() .map(|(range, _)| { - range.start().to_point(&snapshot.buffer_snapshot).row - 
..range.end().to_point(&snapshot.buffer_snapshot).row + 1 + let start = range.start.to_point(&snapshot.buffer_snapshot); + let end = range.end.to_point(&snapshot.buffer_snapshot); + start.row..end.row }) .collect::>(); let deletions = editor @@ -384,13 +384,8 @@ impl EditorTestContext { /// See the `util::test::marked_text_ranges` function for more information. #[track_caller] pub fn assert_editor_state(&mut self, marked_text: &str) { - let (unmarked_text, expected_selections) = marked_text_ranges(marked_text, true); - let buffer_text = self.buffer_text(); - - if buffer_text != unmarked_text { - panic!("Unmarked text doesn't match buffer text\nBuffer text: {buffer_text:?}\nUnmarked text: {unmarked_text:?}\nRaw buffer text\n{buffer_text}\nRaw unmarked text\n{unmarked_text}"); - } - + let (expected_text, expected_selections) = marked_text_ranges(marked_text, true); + pretty_assertions::assert_eq!(self.buffer_text(), expected_text, "unexpected buffer text"); self.assert_selections(expected_selections, marked_text.to_string()) } @@ -463,20 +458,11 @@ impl EditorTestContext { let actual_marked_text = generate_marked_text(&self.buffer_text(), &actual_selections, true); if expected_selections != actual_selections { - panic!( - indoc! {" - - {}Editor has unexpected selections. 
- - Expected selections: - {} - - Actual selections: - {} - "}, - self.assertion_context(), - expected_marked_text, + pretty_assertions::assert_eq!( actual_marked_text, + expected_marked_text, + "{}Editor has unexpected selections", + self.assertion_context(), ); } } diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index fd631648c2c759..0e9482b7594144 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -116,11 +116,13 @@ impl GoToLine { if let Some(point) = self.point_from_query(cx) { self.active_editor.update(cx, |active_editor, cx| { let snapshot = active_editor.snapshot(cx).display_snapshot; - let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left); - let anchor = snapshot.buffer_snapshot.anchor_before(point); + let start = snapshot.buffer_snapshot.clip_point(point, Bias::Left); + let end = start + Point::new(1, 0); + let start = snapshot.buffer_snapshot.anchor_before(start); + let end = snapshot.buffer_snapshot.anchor_after(end); active_editor.clear_row_highlights::(); active_editor.highlight_rows::( - anchor..=anchor, + start..end, cx.theme().colors().editor_highlighted_line_background, true, cx, @@ -244,13 +246,13 @@ mod tests { field_1: i32, // display line 3 field_2: i32, // display line 4 } // display line 5 - // display line 7 - struct Another { // display line 8 - field_1: i32, // display line 9 - field_2: i32, // display line 10 - field_3: i32, // display line 11 - field_4: i32, // display line 12 - } // display line 13 + // display line 6 + struct Another { // display line 7 + field_1: i32, // display line 8 + field_2: i32, // display line 9 + field_3: i32, // display line 10 + field_4: i32, // display line 11 + } // display line 12 "} }), ) diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 520311b6f3c625..1d82d06ad85705 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -143,7 +143,7 @@ impl 
OutlineViewDelegate { self.active_editor.update(cx, |active_editor, cx| { active_editor.clear_row_highlights::(); active_editor.highlight_rows::( - outline_item.range.start..=outline_item.range.end, + outline_item.range.start..outline_item.range.end, cx.theme().colors().editor_highlighted_line_background, true, cx, @@ -245,7 +245,7 @@ impl PickerDelegate for OutlineViewDelegate { .next(); if let Some((rows, _)) = highlight { active_editor.change_selections(Some(Autoscroll::center()), cx, |s| { - s.select_ranges([*rows.start()..*rows.start()]) + s.select_ranges([rows.start..rows.start]) }); active_editor.clear_row_highlights::(); active_editor.focus(cx); From 3737d4eb4fa9e576204a367b020ba189ac4aa087 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 27 Sep 2024 23:25:02 +0200 Subject: [PATCH 382/762] Add tooltip for code actions icon button (#18461) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I have just recently discovered this keybinding myself out of talking to folks, ha. The tooltip here might ease the discovery for other folks in the future. 
Screenshot 2024-09-27 at 11 04 28 PM --- Release Notes: - N/A --- crates/editor/src/editor.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b604f388debd58..d1ca70f705ce56 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5368,6 +5368,19 @@ impl Editor { .icon_size(IconSize::XSmall) .icon_color(Color::Muted) .selected(is_active) + .tooltip({ + let focus_handle = self.focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle Code Actions", + &ToggleCodeActions { + deployed_from_indicator: None, + }, + &focus_handle, + cx, + ) + } + }) .on_click(cx.listener(move |editor, _e, cx| { editor.focus(cx); editor.toggle_code_actions( From 675673ed5462711916e7824528698fc235d679b2 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Sat, 28 Sep 2024 01:45:40 +0200 Subject: [PATCH 383/762] Fine-tune hunk control spacing (#18463) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Screenshot 2024-09-28 at 1 09 35 AM --- Release Notes: - N/A --- crates/editor/src/hunk_diff.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index cf2a857b67d2ff..4e7a0f05f4a25b 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -338,7 +338,7 @@ impl Editor { hunk: &HoveredHunk, cx: &mut ViewContext<'_, Editor>, ) -> BlockProperties { - let border_color = cx.theme().colors().border_disabled; + let border_color = cx.theme().colors().border_variant; let gutter_color = match hunk.status { DiffHunkStatus::Added => cx.theme().status().created, DiffHunkStatus::Modified => cx.theme().status().modified, @@ -381,14 +381,15 @@ impl Editor { ) .child( h_flex() + .pl_1p5() + .pr_6() .size_full() .justify_between() .border_t_1() .border_color(border_color) .child( h_flex() - .gap_2() - 
.pl_6() + .gap_1() .child( IconButton::new("next-hunk", IconName::ArrowDown) .shape(IconButtonShape::Square) @@ -595,7 +596,7 @@ impl Editor { }), ) .child( - h_flex().gap_2().pr_6().child( + div().child( IconButton::new("collapse", IconName::Close) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) From 1021f0e28808285e3d301fcb9ccf9af40854ea98 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Sat, 28 Sep 2024 14:21:13 -0400 Subject: [PATCH 384/762] Show release notes locally when showing update notification (#18486) Closes https://github.com/zed-industries/zed/issues/17527 I think we are ok to switch to using the local action now. There are a few things we don't support, like media, but we don't include media directly too often, and I think this might help push the community to maybe add support for it. That being said, I updated the markdown coming back from the endpoint to include links to the web version of the release notes, so they can always hop over to that version, if they would like. https://github.com/user-attachments/assets/b4d207a7-1640-48f1-91d0-94537f74116c All forming of the Markdown happens in the endpoint, so if someone with a better eye wants to update this, you can do that here: https://github.com/zed-industries/zed.dev/blob/0e5923e3e7d1caa8b4bf32d0a7f8999b34dbe64c/src/pages/api/release_notes/v2/%5Bchannel_type%5D/%5Bversion%5D.ts#L50-L62 Release Notes: - Changed the `view the release notes` button in the update toast to trigger the local release notes action. 
--- crates/auto_update/src/auto_update.rs | 12 +++++++----- crates/auto_update/src/update_notification.rs | 19 ++++++++++++++----- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 1fe89cce0f9c4e..60d6369ee869c6 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -345,15 +345,17 @@ pub fn notify_of_any_new_update(cx: &mut ViewContext) -> Option<()> { let should_show_notification = should_show_notification.await?; if should_show_notification { workspace.update(&mut cx, |workspace, cx| { + let workspace_handle = workspace.weak_handle(); workspace.show_notification( NotificationId::unique::(), cx, - |cx| cx.new_view(|_| UpdateNotification::new(version)), + |cx| cx.new_view(|_| UpdateNotification::new(version, workspace_handle)), ); - updater - .read(cx) - .set_should_show_update_notification(false, cx) - .detach_and_log_err(cx); + updater.update(cx, |updater, cx| { + updater + .set_should_show_update_notification(false, cx) + .detach_and_log_err(cx); + }); })?; } anyhow::Ok(()) diff --git a/crates/auto_update/src/update_notification.rs b/crates/auto_update/src/update_notification.rs index 66028c24011995..7568a0eb1a94e7 100644 --- a/crates/auto_update/src/update_notification.rs +++ b/crates/auto_update/src/update_notification.rs @@ -1,13 +1,18 @@ use gpui::{ div, DismissEvent, EventEmitter, InteractiveElement, IntoElement, ParentElement, Render, - SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, + SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, WeakView, }; use menu::Cancel; use release_channel::ReleaseChannel; -use workspace::ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt}; +use util::ResultExt; +use workspace::{ + ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt}, + Workspace, +}; pub struct UpdateNotification { version: SemanticVersion, + workspace: WeakView, } impl EventEmitter 
for UpdateNotification {} @@ -41,7 +46,11 @@ impl Render for UpdateNotification { .child(Label::new("View the release notes")) .cursor_pointer() .on_click(cx.listener(|this, _, cx| { - crate::view_release_notes(&Default::default(), cx); + this.workspace + .update(cx, |workspace, cx| { + crate::view_release_notes_locally(workspace, cx); + }) + .log_err(); this.dismiss(&menu::Cancel, cx) })), ) @@ -49,8 +58,8 @@ impl Render for UpdateNotification { } impl UpdateNotification { - pub fn new(version: SemanticVersion) -> Self { - Self { version } + pub fn new(version: SemanticVersion, workspace: WeakView) -> Self { + Self { version, workspace } } pub fn dismiss(&mut self, _: &Cancel, cx: &mut ViewContext) { From 8aeab4800c6133f4bb51541a01acb630bae739e8 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Sat, 28 Sep 2024 15:20:32 -0400 Subject: [PATCH 385/762] Continue to redirect to GitHub commits for nightly and dev release notes (#18487) We are now using the `view release notes locally` action when clicking on the update toast - the endpoint for this action does not currently return anything for valid for these channels, as we don't have support yet for diffing between these builds, so for now, [continue to do what the `view release notes` action did and just send the user to the commit view on GitHub](https://github.com/zed-industries/zed/blob/caffb2733f8e859637050e3708dbd10b6e409762/crates/auto_update/src/auto_update.rs#L255-L260). It is a bit counterintuitive to send the user to the browser when using the "local" action, but this is just a patch in the interim. If we make adjustments to our channels to keep the nightly tag stable and add some sort of unique suffix, like a timestamp, we can then adjust things to return these in the request body and show them in the editor. 
Release Notes: - N/A --- crates/auto_update/src/auto_update.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 60d6369ee869c6..2c93ee4171c8b8 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -264,6 +264,18 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<( fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext) { let release_channel = ReleaseChannel::global(cx); + + let url = match release_channel { + ReleaseChannel::Nightly => Some("https://github.com/zed-industries/zed/commits/nightly/"), + ReleaseChannel::Dev => Some("https://github.com/zed-industries/zed/commits/main/"), + _ => None, + }; + + if let Some(url) = url { + cx.open_url(url); + return; + } + let version = AppVersion::global(cx).to_string(); let client = client::Client::global(cx).http_client(); From 84ce81caf155fefb60b1a26e44b14b1a28cfb2cd Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Sun, 29 Sep 2024 10:30:48 -0600 Subject: [PATCH 386/762] Pass `Summary::Context` to `Item::summarize` (#18510) We are going to use this in the multi-buffer to produce a summary for an `Excerpt` that contains a `Range`. 
Release Notes: - N/A Co-authored-by: Nathan --- crates/channel/src/channel_chat.rs | 2 +- crates/editor/src/display_map/block_map.rs | 2 +- crates/editor/src/display_map/crease_map.rs | 2 +- crates/editor/src/display_map/fold_map.rs | 4 ++-- crates/editor/src/display_map/inlay_map.rs | 2 +- crates/editor/src/display_map/wrap_map.rs | 2 +- crates/editor/src/git/blame.rs | 2 +- crates/git/src/diff.rs | 2 +- crates/gpui/src/elements/list.rs | 2 +- crates/language/src/diagnostic_set.rs | 2 +- crates/language/src/syntax_map.rs | 2 +- crates/multi_buffer/src/multi_buffer.rs | 4 ++-- crates/notifications/src/notification_store.rs | 2 +- crates/rope/src/rope.rs | 2 +- crates/sum_tree/src/sum_tree.rs | 12 ++++++------ crates/sum_tree/src/tree_map.rs | 2 +- crates/text/src/locator.rs | 2 +- crates/text/src/operation_queue.rs | 2 +- crates/text/src/text.rs | 6 +++--- crates/text/src/undo_map.rs | 2 +- crates/worktree/src/worktree.rs | 4 ++-- 21 files changed, 31 insertions(+), 31 deletions(-) diff --git a/crates/channel/src/channel_chat.rs b/crates/channel/src/channel_chat.rs index 1a9e46db0460a9..e5b5b74c16262b 100644 --- a/crates/channel/src/channel_chat.rs +++ b/crates/channel/src/channel_chat.rs @@ -808,7 +808,7 @@ pub fn mentions_to_proto(mentions: &[(Range, UserId)]) -> Vec Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { ChannelMessageSummary { max_id: self.id, count: 1, diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index efa026a56c6101..52e0ca2486d25d 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1360,7 +1360,7 @@ impl<'a> Iterator for BlockBufferRows<'a> { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.summary.clone() } } diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs 
index c3f2b0061ac732..531c650c43a6ed 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -291,7 +291,7 @@ impl sum_tree::Summary for ItemSummary { impl sum_tree::Item for CreaseItem { type Summary = ItemSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary { ItemSummary { range: self.crease.range.clone(), } diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 37983030b8e1a7..5eb26ff9693883 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -944,7 +944,7 @@ struct TransformSummary { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.summary.clone() } } @@ -1004,7 +1004,7 @@ impl Default for FoldRange { impl sum_tree::Item for Fold { type Summary = FoldSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary { FoldSummary { start: self.range.start, end: self.range.end, diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 712db45e3f61a7..d4e39f2df9270e 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -74,7 +74,7 @@ impl Inlay { impl sum_tree::Item for Transform { type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { match self { Transform::Isomorphic(summary) => TransformSummary { input: summary.clone(), diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 564bba2158030e..dc4d93058cdf7f 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -917,7 +917,7 @@ impl Transform { impl sum_tree::Item for Transform { 
type Summary = TransformSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.summary.clone() } } diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 733d42d0c57dda..303ead16b22319 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -29,7 +29,7 @@ pub struct GitBlameEntrySummary { impl sum_tree::Item for GitBlameEntry { type Summary = GitBlameEntrySummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { GitBlameEntrySummary { rows: self.rows } } } diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 1f7930ce1442da..baad824577d301 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -34,7 +34,7 @@ struct InternalDiffHunk { impl sum_tree::Item for InternalDiffHunk { type Summary = DiffHunkSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary { DiffHunkSummary { buffer_range: self.buffer_range.clone(), } diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index d03392867b5ee2..6ac6d2a9bf8d7d 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -858,7 +858,7 @@ impl Styled for List { impl sum_tree::Item for ListItem { type Summary = ListItemSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _: &()) -> Self::Summary { match self { ListItem::Unmeasured { focus_handle } => ListItemSummary { count: 1, diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index c35659d9bbd4d1..38d4216beeabc4 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -224,7 +224,7 @@ impl DiagnosticSet { impl sum_tree::Item for DiagnosticEntry { type Summary = Summary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary { Summary { start: 
self.range.start, end: self.range.end, diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index 55177f79620db7..8617696cc471ec 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -1739,7 +1739,7 @@ impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> impl sum_tree::Item for SyntaxLayerEntry { type Summary = SyntaxLayerSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &BufferSnapshot) -> Self::Summary { SyntaxLayerSummary { min_depth: self.depth, max_depth: self.depth, diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 828b39967d9e1f..7aa733ba8fa373 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -4596,7 +4596,7 @@ impl fmt::Debug for Excerpt { impl sum_tree::Item for Excerpt { type Summary = ExcerptSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { let mut text = self.text_summary.clone(); if self.has_trailing_newline { text += TextSummary::from("\n"); @@ -4613,7 +4613,7 @@ impl sum_tree::Item for Excerpt { impl sum_tree::Item for ExcerptIdMapping { type Summary = ExcerptId; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.id } } diff --git a/crates/notifications/src/notification_store.rs b/crates/notifications/src/notification_store.rs index 48fcb5dfbb830a..5c3de53ee1895c 100644 --- a/crates/notifications/src/notification_store.rs +++ b/crates/notifications/src/notification_store.rs @@ -455,7 +455,7 @@ impl EventEmitter for NotificationStore {} impl sum_tree::Item for NotificationEntry { type Summary = NotificationSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { NotificationSummary { max_id: self.id, count: 1, diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 56fe7fc0544471..68ff7d5c6956aa 100644 --- 
a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -1159,7 +1159,7 @@ impl Chunk { impl sum_tree::Item for Chunk { type Summary = ChunkSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { ChunkSummary::from(self.0.as_str()) } } diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 7013dc66fd1ed7..fbfe3b06f3ab4c 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -20,7 +20,7 @@ pub const TREE_BASE: usize = 6; pub trait Item: Clone { type Summary: Summary; - fn summary(&self) -> Self::Summary; + fn summary(&self, cx: &::Context) -> Self::Summary; } /// An [`Item`] whose summary has a specific key that can be used to identify it @@ -211,7 +211,7 @@ impl SumTree { while iter.peek().is_some() { let items: ArrayVec = iter.by_ref().take(2 * TREE_BASE).collect(); let item_summaries: ArrayVec = - items.iter().map(|item| item.summary()).collect(); + items.iter().map(|item| item.summary(cx)).collect(); let mut summary = item_summaries[0].clone(); for item_summary in &item_summaries[1..] { @@ -281,7 +281,7 @@ impl SumTree { .map(|items| { let items: ArrayVec = items.into_iter().collect(); let item_summaries: ArrayVec = - items.iter().map(|item| item.summary()).collect(); + items.iter().map(|item| item.summary(cx)).collect(); let mut summary = item_summaries[0].clone(); for item_summary in &item_summaries[1..] 
{ ::add_summary(&mut summary, item_summary, cx); @@ -405,7 +405,7 @@ impl SumTree { if let Some((item, item_summary)) = items.last_mut().zip(item_summaries.last_mut()) { (f)(item); - *item_summary = item.summary(); + *item_summary = item.summary(cx); *summary = sum(item_summaries.iter(), cx); Some(summary.clone()) } else { @@ -461,7 +461,7 @@ impl SumTree { } pub fn push(&mut self, item: T, cx: &::Context) { - let summary = item.summary(); + let summary = item.summary(cx); self.append( SumTree(Arc::new(Node::Leaf { summary: summary.clone(), @@ -1352,7 +1352,7 @@ mod tests { impl Item for u8 { type Summary = IntegersSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { IntegersSummary { count: 1, sum: *self as usize, diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index c57226b681432a..9a4d952e93f226 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -224,7 +224,7 @@ where { type Summary = MapKey; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.key() } } diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index 7afc16f5815496..e38b9b58af22c2 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -69,7 +69,7 @@ impl Default for Locator { impl sum_tree::Item for Locator { type Summary = Locator; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.clone() } } diff --git a/crates/text/src/operation_queue.rs b/crates/text/src/operation_queue.rs index c7964f62674ca5..52b534a41b7395 100644 --- a/crates/text/src/operation_queue.rs +++ b/crates/text/src/operation_queue.rs @@ -107,7 +107,7 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey { impl Item for OperationItem { type Summary = OperationSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { OperationSummary { key: 
OperationKey::new(self.0.lamport_timestamp()), len: 1, diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 8bdc9fdb03d89b..80eafcf4eba2d5 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2617,7 +2617,7 @@ impl Fragment { impl sum_tree::Item for Fragment { type Summary = FragmentSummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &Option) -> Self::Summary { let mut max_version = clock::Global::new(); max_version.observe(self.timestamp); for deletion in &self.deletions { @@ -2688,7 +2688,7 @@ impl Default for FragmentSummary { impl sum_tree::Item for InsertionFragment { type Summary = InsertionFragmentKey; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { InsertionFragmentKey { timestamp: self.timestamp, split_offset: self.split_offset, @@ -2700,7 +2700,7 @@ impl sum_tree::KeyedItem for InsertionFragment { type Key = InsertionFragmentKey; fn key(&self) -> Self::Key { - sum_tree::Item::summary(self) + sum_tree::Item::summary(self, &()) } } diff --git a/crates/text/src/undo_map.rs b/crates/text/src/undo_map.rs index 4e670fd456068d..ed363cfc6b6d77 100644 --- a/crates/text/src/undo_map.rs +++ b/crates/text/src/undo_map.rs @@ -11,7 +11,7 @@ struct UndoMapEntry { impl sum_tree::Item for UndoMapEntry { type Summary = UndoMapKey; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { self.key } } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index d81c91132b9d39..a3daf8ea2eade7 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -3339,7 +3339,7 @@ impl EntryKind { impl sum_tree::Item for Entry { type Summary = EntrySummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { let non_ignored_count = if self.is_ignored || self.is_external { 0 } else { @@ -3434,7 +3434,7 @@ struct PathEntry { impl sum_tree::Item for PathEntry { type Summary = 
PathEntrySummary; - fn summary(&self) -> Self::Summary { + fn summary(&self, _cx: &()) -> Self::Summary { PathEntrySummary { max_id: self.id } } } From 5f35fa5d92d45269bf311f1aa5e9c1703e54ffc4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Thorben=20Kr=C3=B6ger?= Date: Sun, 29 Sep 2024 19:54:09 +0200 Subject: [PATCH 387/762] Associate `uv.lock` files with TOML (#18426) The `uv` python package manager uses the TOML for it's `uv.lock` file, see https://docs.astral.sh/uv/guides/projects/#uvlock. Ref #7808 Release Notes: - associate `uv.lock` files with the TOML language --- assets/settings/default.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 82778dc8f6c484..2a11a85d4efae4 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -769,7 +769,8 @@ "**/Zed/**/*.json", "tsconfig.json", "pyrightconfig.json" - ] + ], + "TOML": ["uv.lock"] }, /// By default use a recent system version of node, or install our own. /// You can override this to use a version of node that is not in $PATH with: From 250f2e76eb6f97fe097ee39863c621335762c102 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 30 Sep 2024 08:05:51 +0200 Subject: [PATCH 388/762] tasks: Display runnables at the start of folds (#18526) Release Notes: - Fixed task indicators not showing up at the starts of folds. --- crates/editor/src/element.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index e5c067e37ec3dc..1c35fa6bcdd4e8 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1646,7 +1646,16 @@ impl EditorElement { return None; } if snapshot.is_line_folded(multibuffer_row) { - return None; + // Skip folded indicators, unless it's the starting line of a fold. 
+ if multibuffer_row + .0 + .checked_sub(1) + .map_or(false, |previous_row| { + snapshot.is_line_folded(MultiBufferRow(previous_row)) + }) + { + return None; + } } let button = editor.render_run_indicator( &self.style, From 77df7e56f7852aaa121cafb0bc0596c9b43d3680 Mon Sep 17 00:00:00 2001 From: Tom Wieczorek Date: Mon, 30 Sep 2024 08:34:41 +0200 Subject: [PATCH 389/762] settings: Make external formatter arguments optional (#18340) If specifying a formatter in the settings like this: "languages": { "foo": { "formatter": { "external": { "command": "/path/to/foo-formatter" } } } } Zed will show an error like this: Invalid user settings file data did not match any variant of untagged enum SingleOrVec This is because the arguments are not optional. The error is hard to understand, so let's make the arguments actually optional, which makes the above settings snippet valid. Release Notes: - Make external formatter arguments optional --- crates/collab/src/tests/integration_tests.rs | 2 +- crates/language/src/language_settings.rs | 2 +- crates/project/src/lsp_store.rs | 31 +++++++++++++------- 3 files changed, 22 insertions(+), 13 deletions(-) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index afc3e7cfb84ee2..615ad52e2ef367 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -4409,7 +4409,7 @@ async fn test_formatting_buffer( file.defaults.formatter = Some(SelectedFormatter::List(FormatterList( vec![Formatter::External { command: "awk".into(), - arguments: vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(), + arguments: Some(vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into()), }] .into(), ))); diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 2f1a7be2bf492d..d610ab09865ce3 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -661,7 +661,7 
@@ pub enum Formatter { /// The external program to run. command: Arc, /// The arguments to pass to the program. - arguments: Arc<[String]>, + arguments: Option>, }, /// Files should be formatted using code actions executed by language servers. CodeActions(HashMap), diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 37922b7c2ee03b..a3763810e1a262 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -539,13 +539,19 @@ impl LocalLspStore { } Formatter::External { command, arguments } => { let buffer_abs_path = buffer_abs_path.as_ref().map(|path| path.as_path()); - Self::format_via_external_command(buffer, buffer_abs_path, command, arguments, cx) - .await - .context(format!( - "failed to format via external command {:?}", - command - ))? - .map(FormatOperation::External) + Self::format_via_external_command( + buffer, + buffer_abs_path, + command, + arguments.as_deref(), + cx, + ) + .await + .context(format!( + "failed to format via external command {:?}", + command + ))? 
+ .map(FormatOperation::External) } Formatter::CodeActions(code_actions) => { let code_actions = deserialize_code_actions(code_actions); @@ -571,7 +577,7 @@ impl LocalLspStore { buffer: &Model, buffer_abs_path: Option<&Path>, command: &str, - arguments: &[String], + arguments: Option<&[String]>, cx: &mut AsyncAppContext, ) -> Result> { let working_dir_path = buffer.update(cx, |buffer, cx| { @@ -595,14 +601,17 @@ impl LocalLspStore { child.current_dir(working_dir_path); } - let mut child = child - .args(arguments.iter().map(|arg| { + if let Some(arguments) = arguments { + child.args(arguments.iter().map(|arg| { if let Some(buffer_abs_path) = buffer_abs_path { arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy()) } else { arg.replace("{buffer_path}", "Untitled") } - })) + })); + } + + let mut child = child .stdin(smol::process::Stdio::piped()) .stdout(smol::process::Stdio::piped()) .stderr(smol::process::Stdio::piped()) From e39695bf1c16b298552336fc2c063f4a88a46a45 Mon Sep 17 00:00:00 2001 From: Maksim Bondarenkov <119937608+ognevny@users.noreply.github.com> Date: Mon, 30 Sep 2024 09:38:49 +0300 Subject: [PATCH 390/762] docs: Update msys2 section in development/windows (#18385) merge after https://packages.msys2.org/packages/mingw-w64-clang-x86_64-zed is available. 
alternatively you can check the [queue](https://packages.msys2.org/queue) for build status Zed now compiles and runs under msys2/CLANG64 environment, so change the docs to give the users a choice of their environment Release Notes: - N/A --- docs/src/development/windows.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index 86eb21965e550e..67808186d1961b 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -63,16 +63,16 @@ cargo test --workspace ## Installing from msys2 -[MSYS2](https://msys2.org/) distribution provides Zed as a package [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed). To download the prebuilt binary, run +[MSYS2](https://msys2.org/) distribution provides Zed as a package [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed). The package is available for UCRT64 and CLANG64. To download it, run ```sh pacman -Syu -pacman -S mingw-w64-ucrt-x86_64-zed +pacman -S $MINGW_PACKAGE_PREFIX-zed ``` -then you can run `zed` in a UCRT64 shell. +then you can run `zed` in a shell. -You can see the [build script](https://github.com/msys2/MINGW-packages/blob/master/mingw-w64-zed/PKGBUILD) for more details. +You can see the [build script](https://github.com/msys2/MINGW-packages/blob/master/mingw-w64-zed/PKGBUILD) for more details on build process. > Please, report any issue in [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed) first. 
From 5b40debb5f042a1d4230ceb5f012e7d09a785fb2 Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Mon, 30 Sep 2024 09:54:22 +0300 Subject: [PATCH 391/762] Don't stop loading the env if direnv call fails (#18473) Before this we would stop loading the environment if the call to direnv failed, which is not necessary in any way cc @mrnugget Release Notes: - Fixed the environment not loading if `direnv` mode is set to `direct` and `.envrc` is not allowed --- crates/project/src/environment.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index 641ad206c05b27..23d23c9dc6bb01 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -198,8 +198,9 @@ async fn load_shell_environment( anyhow::ensure!( direnv_output.status.success(), - "direnv exited with error {:?}", - direnv_output.status + "direnv exited with error {:?}. 
Stderr:\n{}", + direnv_output.status, + String::from_utf8_lossy(&direnv_output.stderr) ); let output = String::from_utf8_lossy(&direnv_output.stdout); @@ -214,7 +215,7 @@ async fn load_shell_environment( let direnv_environment = match load_direnv { DirenvSettings::ShellHook => None, - DirenvSettings::Direct => load_direnv_environment(dir).await?, + DirenvSettings::Direct => load_direnv_environment(dir).await.log_err().flatten(), } .unwrap_or(HashMap::default()); From 898d48a57497011f6b96c98ddee35b511398fbd9 Mon Sep 17 00:00:00 2001 From: Sylvain Brunerie Date: Mon, 30 Sep 2024 09:02:12 +0200 Subject: [PATCH 392/762] php: Add syntax highlighting inside heredoc strings (#18368) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PHP heredoc strings make it easy to define string literals over multiple lines: ```php $someString = << Date: Mon, 30 Sep 2024 10:09:13 +0300 Subject: [PATCH 393/762] gpui: Fix blur region on Plasma/Wayland (#18465) Once again aping after what winit does - since we always want to have the whole window blurred there is apparently no need to specify a blur region at all. Rounded corners would be the exception, but that is not possible with the current protocol (it is planned for the vendor-neutral version though!) This eliminates the problem where only a fixed region of the window would get blurred if the window was resized to be larger than at launch. 
Also a drive-by comment grammar fix :wink: Release Notes: - Fixed blur region handling on Plasma/Wayland --- crates/gpui/src/platform/linux/wayland/window.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/window.rs b/crates/gpui/src/platform/linux/wayland/window.rs index a0520f6b7056e5..329b6c185fbfe9 100644 --- a/crates/gpui/src/platform/linux/wayland/window.rs +++ b/crates/gpui/src/platform/linux/wayland/window.rs @@ -1046,8 +1046,8 @@ fn update_window(mut state: RefMut) { && state.decorations == WindowDecorations::Server { // Promise the compositor that this region of the window surface - // contains no transparent pixels. This allows the compositor to - // do skip whatever is behind the surface for better performance. + // contains no transparent pixels. This allows the compositor to skip + // updating whatever is behind the surface for better performance. state.surface.set_opaque_region(Some(®ion)); } else { state.surface.set_opaque_region(None); @@ -1057,7 +1057,6 @@ fn update_window(mut state: RefMut) { if state.background_appearance == WindowBackgroundAppearance::Blurred { if state.blur.is_none() { let blur = blur_manager.create(&state.surface, &state.globals.qh, ()); - blur.set_region(Some(®ion)); state.blur = Some(blur); } state.blur.as_ref().unwrap().commit(); From ed5eb725f93305642f02b10676ce56c8a7779d5d Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 30 Sep 2024 11:25:11 +0300 Subject: [PATCH 394/762] Improve language server log view split ergonomics (#18527) Allows to split log view, and opens it split on the right, same as the syntax tree view. 
Release Notes: - Improved language server log panel split ergonomics --- crates/language_tools/src/lsp_log.rs | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index d8fe3aa51840e4..aee39ff0a088e7 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -17,7 +17,7 @@ use ui::{prelude::*, Button, Checkbox, ContextMenu, Label, PopoverMenu, Selectio use workspace::{ item::{Item, ItemHandle}, searchable::{SearchEvent, SearchableItem, SearchableItemHandle}, - ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, + SplitDirection, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, WorkspaceId, }; const SEND_LINE: &str = "// Send:"; @@ -194,12 +194,11 @@ pub fn init(cx: &mut AppContext) { workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, cx| { let project = workspace.project().read(cx); if project.is_local() { - workspace.add_item_to_active_pane( + workspace.split_item( + SplitDirection::Right, Box::new(cx.new_view(|cx| { LspLogView::new(workspace.project().clone(), log_store.clone(), cx) })), - None, - true, cx, ); } @@ -912,6 +911,27 @@ impl Item for LspLogView { fn as_searchable(&self, handle: &View) -> Option> { Some(Box::new(handle.clone())) } + + fn clone_on_split( + &self, + _workspace_id: Option, + cx: &mut ViewContext, + ) -> Option> + where + Self: Sized, + { + Some(cx.new_view(|cx| { + let mut new_view = Self::new(self.project.clone(), self.log_store.clone(), cx); + if let Some(server_id) = self.current_server_id { + match self.active_entry_kind { + LogKind::Rpc => new_view.show_rpc_trace_for_server(server_id, cx), + LogKind::Trace => new_view.show_trace_for_server(server_id, cx), + LogKind::Logs => new_view.show_logs_for_server(server_id, cx), + } + } + new_view + })) + } } impl SearchableItem for LspLogView { From 
1f72069b42543d380be892c415b7433519375771 Mon Sep 17 00:00:00 2001 From: VacheDesNeiges <33199153+VacheDesNeiges@users.noreply.github.com> Date: Mon, 30 Sep 2024 10:27:30 +0200 Subject: [PATCH 395/762] Improve C++ Tree-sitter queries (#18016) I made a few tree-sitter queries for improving the highlighting of C++. There is one query that I'm not totally certain about and would appreciate some feedback on it, the one that concerns attributes. Many editor only highlight the identifier as a keyword (This is the behavior implemented in this commit), while others, for example the tree-sitter plugin for neovim, tags the entire attribute for highlighting (double brackets included). I don't know which one is preferable. Here are screenshots of the two versions: ![image](https://github.com/user-attachments/assets/4e1b92c8-adc7-4900-a5b1-dc43c98f4c67) ![image](https://github.com/user-attachments/assets/290a13e3-5cb3-45cb-b6d9-3dc3e6a8af2d) Release Notes: - Fixed C++ attributes identifiers being wrongly highlighed through the tag "variable" - C++ attribute identifiers (nodiscard,deprecated, noreturn, etc.. ) are now highlighted through the tag "keyword" - Changed C++ primitives types (void, bool, int, size_t, etc.. ) to no longer be highlighted with the tag "keyword", they can now be highlighted by the tag "type.primitive". - Added a tag "concept" for highlighting C++ concept identifiers. 
(This tag name has been chosen to be the same than the one returned by clangd's semantic tokens) --- crates/languages/src/cpp/highlights.scm | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/cpp/highlights.scm b/crates/languages/src/cpp/highlights.scm index 4c9db569285e39..2df9ec2923be3d 100644 --- a/crates/languages/src/cpp/highlights.scm +++ b/crates/languages/src/cpp/highlights.scm @@ -2,6 +2,10 @@ (field_identifier) @property (namespace_identifier) @namespace +(concept_definition + (identifier) @concept) + + (call_expression function: (qualified_identifier name: (identifier) @function)) @@ -64,6 +68,14 @@ (auto) @type (type_identifier) @type +type :(primitive_type) @type.primitive + +(requires_clause + constraint: (template_type + name: (type_identifier) @concept)) + +(attribute + name: (identifier) @keyword) ((identifier) @constant (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) @@ -119,7 +131,6 @@ "using" "virtual" "while" - (primitive_type) (sized_type_specifier) (storage_class_specifier) (type_qualifier) From 707ccb04d2eb15129dce19e12388c1e5321aef57 Mon Sep 17 00:00:00 2001 From: Patrick MARIE Date: Mon, 30 Sep 2024 10:27:47 +0200 Subject: [PATCH 396/762] Restore paste on middle-click on linux (#18503) This is a partial revert of e6c1c51b37a, which removed the middle-click pasting on linux (both x11 & wayland). It also restores the `middle_click_paste` option behavior which became unexistent. Release Notes: - Restore Linux middle-click pasting. 
--- crates/editor/src/element.rs | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 1c35fa6bcdd4e8..98a5ff7f4dff50 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -636,11 +636,30 @@ impl EditorElement { cx.stop_propagation(); } else if end_selection && pending_nonempty_selections { cx.stop_propagation(); - } else if cfg!(target_os = "linux") - && event.button == MouseButton::Middle - && (!text_hitbox.is_hovered(cx) || editor.read_only(cx)) - { - return; + } else if cfg!(target_os = "linux") && event.button == MouseButton::Middle { + if !text_hitbox.is_hovered(cx) || editor.read_only(cx) { + return; + } + + #[cfg(target_os = "linux")] + if EditorSettings::get_global(cx).middle_click_paste { + if let Some(text) = cx.read_from_primary().and_then(|item| item.text()) { + let point_for_position = + position_map.point_for_position(text_hitbox.bounds, event.position); + let position = point_for_position.previous_valid; + + editor.select( + SelectPhase::Begin { + position, + add: false, + click_count: 1, + }, + cx, + ); + editor.insert(&text, cx); + } + cx.stop_propagation() + } } } From 57ad5778fad8cbdeada7815ca9c58fec63db90d8 Mon Sep 17 00:00:00 2001 From: 0hDEADBEAF <0xdeadbeaf@tutamail.com> Date: Mon, 30 Sep 2024 10:34:44 +0200 Subject: [PATCH 397/762] Add a way to explicitly specify RC toolkit path (#18402) Closes #18393 Release Notes: - Added a `ZED_RC_TOOLKIT_PATH` env variable so `winresource` crate can fetch the RC executable path correctly on some configurations --- crates/zed/build.rs | 7 +++++++ docs/src/development/windows.md | 27 +++++++++++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/crates/zed/build.rs b/crates/zed/build.rs index c0d5c418750a4d..3013773f918342 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -56,6 +56,13 @@ fn main() { println!("cargo:rerun-if-changed={}", 
icon.display()); let mut res = winresource::WindowsResource::new(); + + // Depending on the security applied to the computer, winresource might fail + // fetching the RC path. Therefore, we add a way to explicitly specify the + // toolkit path, allowing winresource to use a valid RC path. + if let Some(explicit_rc_toolkit_path) = std::env::var("ZED_RC_TOOLKIT_PATH").ok() { + res.set_toolkit_path(explicit_rc_toolkit_path.as_str()); + } res.set_icon(icon.to_str().unwrap()); res.set("FileDescription", "Zed"); res.set("ProductName", "Zed"); diff --git a/docs/src/development/windows.md b/docs/src/development/windows.md index 67808186d1961b..03e8cae66b86d3 100644 --- a/docs/src/development/windows.md +++ b/docs/src/development/windows.md @@ -93,3 +93,30 @@ This error can happen if you are using the "rust-lld.exe" linker. Consider tryin If you are using a global config, consider moving the Zed repository to a nested directory and add a `.cargo/config.toml` with a custom linker config in the parent directory. See this issue for more information [#12041](https://github.com/zed-industries/zed/issues/12041) + +### Invalid RC path selected + +Sometimes, depending on the security rules applied to your laptop, you may get the following error while compiling Zed: + +``` +error: failed to run custom build command for `zed(C:\Users\USER\src\zed\crates\zed)` + +Caused by: + process didn't exit successfully: `C:\Users\USER\src\zed\target\debug\build\zed-b24f1e9300107efc\build-script-build` (exit code: 1) + --- stdout + cargo:rerun-if-changed=../../.git/logs/HEAD + cargo:rustc-env=ZED_COMMIT_SHA=25e2e9c6727ba9b77415588cfa11fd969612adb7 + cargo:rustc-link-arg=/stack:8388608 + cargo:rerun-if-changed=resources/windows/app-icon.ico + package.metadata.winresource does not exist + Selected RC path: 'bin\x64\rc.exe' + + --- stderr + The system cannot find the path specified. (os error 3) +warning: build failed, waiting for other jobs to finish... 
+``` + +In order to fix this issue, you can manually set the `ZED_RC_TOOLKIT_PATH` environment variable to the RC toolkit path. Usually, you can set it to: +`C:\Program Files (x86)\Windows Kits\10\bin\\x64`. + +See this [issue](https://github.com/zed-industries/zed/issues/18393) for more information. From 533416c5a96d3201dfd5e639a6a2d8fc9798a6fb Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 30 Sep 2024 12:38:57 +0200 Subject: [PATCH 398/762] terminal: Make CursorShape configurable (#18530) This builds on top of @Yevgen's #15840 and combines it with the settings names introduced in #17572. Closes #4731. Release Notes: - Added a setting for the terminal's default cursor shape. The setting is `{"terminal": {"cursor_shape": "block"}}``. Possible values: `block`, `bar`, `hollow`, `underline`. Demo: https://github.com/user-attachments/assets/96ed28c2-c222-436b-80cb-7cd63eeb47dd --- assets/settings/default.json | 12 +++++++ crates/project/src/terminals.rs | 1 + crates/terminal/src/terminal.rs | 13 +++++-- crates/terminal/src/terminal_settings.rs | 43 +++++++++++++++++++++++ crates/terminal_view/src/terminal_view.rs | 23 ++++++++++-- 5 files changed, 88 insertions(+), 4 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 2a11a85d4efae4..ba95c2cfcdaaa5 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -671,6 +671,18 @@ // 3. Always blink the cursor, ignoring the terminal mode // "blinking": "on", "blinking": "terminal_controlled", + // Default cursor shape for the terminal. + // 1. A block that surrounds the following character + // "block" + // 2. A vertical bar + // "bar" + // 3. An underline that runs along the following character + // "underscore" + // 4. A box drawn around the following character + // "hollow" + // + // Default: not set, defaults to "block" + "cursor_shape": null, // Set whether Alternate Scroll mode (code: ?1007) is active by default. 
// Alternate Scroll mode converts mouse scroll events into up / down key // presses when in the alternate screen (e.g. when running applications diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 136842d158604c..ababb3261b3376 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -216,6 +216,7 @@ impl Project { shell, env, Some(settings.blinking), + settings.cursor_shape.unwrap_or_default(), settings.alternate_scroll, settings.max_scroll_history_lines, window, diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 8f8982f02c4701..f9767b07d12612 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -18,7 +18,9 @@ use alacritty_terminal::{ Config, RenderableCursor, TermMode, }, tty::{self}, - vte::ansi::{ClearMode, Handler, NamedPrivateMode, PrivateMode}, + vte::ansi::{ + ClearMode, CursorStyle as AlacCursorStyle, Handler, NamedPrivateMode, PrivateMode, + }, Term, }; use anyhow::{bail, Result}; @@ -40,7 +42,7 @@ use serde::{Deserialize, Serialize}; use settings::Settings; use smol::channel::{Receiver, Sender}; use task::{HideStrategy, Shell, TaskId}; -use terminal_settings::{AlternateScroll, TerminalBlink, TerminalSettings}; +use terminal_settings::{AlternateScroll, CursorShape, TerminalBlink, TerminalSettings}; use theme::{ActiveTheme, Theme}; use util::truncate_and_trailoff; @@ -314,6 +316,7 @@ impl TerminalBuilder { shell: Shell, mut env: HashMap, blink_settings: Option, + cursor_shape: CursorShape, alternate_scroll: AlternateScroll, max_scroll_history_lines: Option, window: AnyWindowHandle, @@ -353,6 +356,7 @@ impl TerminalBuilder { // Setup Alacritty's env, which modifies the current process's environment alacritty_terminal::tty::setup_env(); + let default_cursor_style = AlacCursorStyle::from(cursor_shape); let scrolling_history = if task.is_some() { // Tasks like `cargo build --all` may produce a lot of output, ergo allow maximum 
scrolling. // After the task finishes, we do not allow appending to that terminal, so small tasks output should not @@ -365,6 +369,7 @@ impl TerminalBuilder { }; let config = Config { scrolling_history, + default_cursor_style, ..Config::default() }; @@ -951,6 +956,10 @@ impl Terminal { &self.last_content } + pub fn set_cursor_shape(&mut self, cursor_shape: CursorShape) { + self.term.lock().set_cursor_style(Some(cursor_shape.into())); + } + pub fn total_lines(&self) -> usize { let term = self.term.clone(); let terminal = term.lock_unfair(); diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index 4051caf864d25f..956cde19d90ed7 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -1,3 +1,6 @@ +use alacritty_terminal::vte::ansi::{ + CursorShape as AlacCursorShape, CursorStyle as AlacCursorStyle, +}; use collections::HashMap; use gpui::{ px, AbsoluteLength, AppContext, FontFallbacks, FontFeatures, FontWeight, Pixels, SharedString, @@ -32,6 +35,7 @@ pub struct TerminalSettings { pub font_weight: Option, pub line_height: TerminalLineHeight, pub env: HashMap, + pub cursor_shape: Option, pub blinking: TerminalBlink, pub alternate_scroll: AlternateScroll, pub option_as_meta: bool, @@ -129,6 +133,11 @@ pub struct TerminalSettingsContent { /// /// Default: {} pub env: Option>, + /// Default cursor shape for the terminal. + /// Can be "bar", "block", "underscore", or "hollow". + /// + /// Default: None + pub cursor_shape: Option, /// Sets the cursor blinking behavior in the terminal. /// /// Default: terminal_controlled @@ -282,3 +291,37 @@ pub struct ToolbarContent { /// Default: true pub title: Option, } + +#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum CursorShape { + /// Cursor is a block like `█`. + #[default] + Block, + /// Cursor is an underscore like `_`. 
+ Underline, + /// Cursor is a vertical bar like `⎸`. + Bar, + /// Cursor is a hollow box like `▯`. + Hollow, +} + +impl From for AlacCursorShape { + fn from(value: CursorShape) -> Self { + match value { + CursorShape::Block => AlacCursorShape::Block, + CursorShape::Underline => AlacCursorShape::Underline, + CursorShape::Bar => AlacCursorShape::Beam, + CursorShape::Hollow => AlacCursorShape::HollowBlock, + } + } +} + +impl From for AlacCursorStyle { + fn from(value: CursorShape) -> Self { + AlacCursorStyle { + shape: value.into(), + blinking: false, + } + } +} diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index e0b92035d107b3..f7b38e3f5ca3e4 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -19,7 +19,7 @@ use terminal::{ index::Point, term::{search::RegexSearch, TermMode}, }, - terminal_settings::{TerminalBlink, TerminalSettings, WorkingDirectory}, + terminal_settings::{CursorShape, TerminalBlink, TerminalSettings, WorkingDirectory}, Clear, Copy, Event, MaybeNavigationTarget, Paste, ScrollLineDown, ScrollLineUp, ScrollPageDown, ScrollPageUp, ScrollToBottom, ScrollToTop, ShowCharacterPalette, TaskStatus, Terminal, TerminalSize, @@ -102,6 +102,7 @@ pub struct TerminalView { //Currently using iTerm bell, show bell emoji in tab until input is received has_bell: bool, context_menu: Option<(View, gpui::Point, Subscription)>, + cursor_shape: CursorShape, blink_state: bool, blinking_on: bool, blinking_paused: bool, @@ -171,6 +172,9 @@ impl TerminalView { let focus_out = cx.on_focus_out(&focus_handle, |terminal_view, _event, cx| { terminal_view.focus_out(cx); }); + let cursor_shape = TerminalSettings::get_global(cx) + .cursor_shape + .unwrap_or_default(); Self { terminal, @@ -178,6 +182,7 @@ impl TerminalView { has_bell: false, focus_handle, context_menu: None, + cursor_shape, blink_state: true, blinking_on: false, blinking_paused: false, @@ -255,6 +260,16 @@ impl 
TerminalView { fn settings_changed(&mut self, cx: &mut ViewContext) { let settings = TerminalSettings::get_global(cx); self.show_title = settings.toolbar.title; + + let new_cursor_shape = settings.cursor_shape.unwrap_or_default(); + let old_cursor_shape = self.cursor_shape; + if old_cursor_shape != new_cursor_shape { + self.cursor_shape = new_cursor_shape; + self.terminal.update(cx, |term, _| { + term.set_cursor_shape(self.cursor_shape); + }); + } + cx.notify(); } @@ -903,7 +918,10 @@ impl TerminalView { } fn focus_in(&mut self, cx: &mut ViewContext) { - self.terminal.read(cx).focus_in(); + self.terminal.update(cx, |terminal, _| { + terminal.set_cursor_shape(self.cursor_shape); + terminal.focus_in(); + }); self.blink_cursors(self.blink_epoch, cx); cx.invalidate_character_coordinates(); cx.notify(); @@ -912,6 +930,7 @@ impl TerminalView { fn focus_out(&mut self, cx: &mut ViewContext) { self.terminal.update(cx, |terminal, _| { terminal.focus_out(); + terminal.set_cursor_shape(CursorShape::Hollow); }); cx.notify(); } From 65f6a7e5bc56daa33470fcfdcfb5fa7080c18b13 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 30 Sep 2024 12:39:11 +0200 Subject: [PATCH 399/762] linux/x11: Give title bar inactive bg on mouse down (#18529) This fixes something that I felt was off for a while. Previously, when you'd click on the titlebar to move the window, the titlebar would only change its background once the moving starts, but not on mouse-down. That felt really off, since the moving is down with mouse-down and move, so I think giving the user feedback about the mouse-down event makes more sense. I know there's a subjectivity to this change, so I'm ready to hear other opinions, but for now I want to go with this. 
Release Notes: - N/A --- crates/title_bar/src/title_bar.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 73a82e9ee06201..d6cc839cfdb7bc 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -76,7 +76,7 @@ impl Render for TitleBar { let supported_controls = cx.window_controls(); let decorations = cx.window_decorations(); let titlebar_color = if cfg!(target_os = "linux") { - if cx.is_window_active() { + if cx.is_window_active() && !self.should_move { cx.theme().colors().title_bar_background } else { cx.theme().colors().title_bar_inactive_background From 8ae74bc6df9f4480d70202a0dbc4d975d25158af Mon Sep 17 00:00:00 2001 From: wannacu <76616478+wannacu@users.noreply.github.com> Date: Mon, 30 Sep 2024 18:57:59 +0800 Subject: [PATCH 400/762] gpui: Fix pre-edit position after applying scale factor (#18214) before: ![image](https://github.com/user-attachments/assets/20590089-3333-4ca8-a371-b07acfbe43f9) after: ![image](https://github.com/user-attachments/assets/2d25623e-0602-4d24-b563-64e1d2ec3492) Release Notes: - N/A --- crates/gpui/src/geometry.rs | 6 ++++++ crates/gpui/src/platform.rs | 6 +++--- crates/gpui/src/platform/linux/wayland/client.rs | 4 ++-- crates/gpui/src/platform/linux/wayland/window.rs | 4 ++-- crates/gpui/src/platform/linux/x11/client.rs | 5 +++-- crates/gpui/src/platform/linux/x11/window.rs | 8 ++++---- crates/gpui/src/platform/mac/window.rs | 7 ++++--- crates/gpui/src/platform/test/window.rs | 6 +++--- crates/gpui/src/platform/windows/window.rs | 2 +- crates/gpui/src/window.rs | 4 +++- 10 files changed, 31 insertions(+), 21 deletions(-) diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 8de9e6f009d733..9b9b1698043919 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -2612,6 +2612,12 @@ impl From for f64 { } } +impl From for u32 { + fn from(pixels: ScaledPixels) -> Self 
{ + pixels.0 as u32 + } +} + /// Represents a length in rems, a unit based on the font-size of the window, which can be assigned with [`WindowContext::set_rem_size`][set_rem_size]. /// /// Rems are used for defining lengths that are scalable and consistent across different UI elements. diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 680c813078b86d..b5d6ae312783fe 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -23,8 +23,8 @@ use crate::{ point, Action, AnyWindowHandle, AppContext, AsyncWindowContext, BackgroundExecutor, Bounds, DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GPUSpecs, GlyphId, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, Point, - RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, SharedString, Size, - SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE, + RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, ScaledPixels, Scene, + SharedString, Size, SvgSize, Task, TaskLabel, WindowContext, DEFAULT_WINDOW_SIZE, }; use anyhow::Result; use async_task::Runnable; @@ -381,7 +381,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle { fn set_client_inset(&self, _inset: Pixels) {} fn gpu_specs(&self) -> Option; - fn update_ime_position(&self, _bounds: Bounds); + fn update_ime_position(&self, _bounds: Bounds); #[cfg(any(test, feature = "test-support"))] fn as_test(&mut self) -> Option<&mut TestWindow> { diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 57c43a7e46a0c0..ba4971b63af6d2 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -84,7 +84,7 @@ use crate::{ use crate::{ AnyWindowHandle, CursorStyle, DisplayId, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, - 
NavigationDirection, Pixels, PlatformDisplay, PlatformInput, Point, ScrollDelta, + NavigationDirection, Pixels, PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, ScrollWheelEvent, TouchPhase, }; use crate::{LinuxCommon, WindowParams}; @@ -313,7 +313,7 @@ impl WaylandClientStatePtr { } } - pub fn update_ime_position(&self, bounds: Bounds) { + pub fn update_ime_position(&self, bounds: Bounds) { let client = self.get_client(); let mut state = client.borrow_mut(); if state.composing || state.text_input.is_none() || state.pre_edit_text.is_some() { diff --git a/crates/gpui/src/platform/linux/wayland/window.rs b/crates/gpui/src/platform/linux/wayland/window.rs index 329b6c185fbfe9..1ca358edecd0af 100644 --- a/crates/gpui/src/platform/linux/wayland/window.rs +++ b/crates/gpui/src/platform/linux/wayland/window.rs @@ -26,7 +26,7 @@ use crate::platform::{PlatformAtlas, PlatformInputHandler, PlatformWindow}; use crate::scene::Scene; use crate::{ px, size, AnyWindowHandle, Bounds, Decorations, GPUSpecs, Globals, Modifiers, Output, Pixels, - PlatformDisplay, PlatformInput, Point, PromptLevel, ResizeEdge, Size, Tiling, + PlatformDisplay, PlatformInput, Point, PromptLevel, ResizeEdge, ScaledPixels, Size, Tiling, WaylandClientStatePtr, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations, WindowParams, }; @@ -1010,7 +1010,7 @@ impl PlatformWindow for WaylandWindow { } } - fn update_ime_position(&self, bounds: Bounds) { + fn update_ime_position(&self, bounds: Bounds) { let state = self.borrow(); state.client.update_ime_position(bounds); } diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index d6c806236a9e47..7f5342a50360eb 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -38,7 +38,8 @@ use crate::platform::{LinuxCommon, PlatformWindow}; use crate::{ modifiers_from_xinput_info, point, px, AnyWindowHandle, 
Bounds, ClipboardItem, CursorStyle, DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, Pixels, Platform, - PlatformDisplay, PlatformInput, Point, ScrollDelta, Size, TouchPhase, WindowParams, X11Window, + PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, Size, TouchPhase, + WindowParams, X11Window, }; use super::{button_of_key, modifiers_from_state, pressed_button_from_mask}; @@ -188,7 +189,7 @@ impl X11ClientStatePtr { } } - pub fn update_ime_position(&self, bounds: Bounds) { + pub fn update_ime_position(&self, bounds: Bounds) { let client = self.get_client(); let mut state = client.0.borrow_mut(); if state.composing || state.ximc.is_none() { diff --git a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index b0cf82d605ffff..62b895d01f426c 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -4,9 +4,9 @@ use crate::{ platform::blade::{BladeRenderer, BladeSurfaceConfig}, px, size, AnyWindowHandle, Bounds, Decorations, DevicePixels, ForegroundExecutor, GPUSpecs, Modifiers, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, - PlatformWindow, Point, PromptLevel, ResizeEdge, Scene, Size, Tiling, WindowAppearance, - WindowBackgroundAppearance, WindowBounds, WindowDecorations, WindowKind, WindowParams, - X11ClientStatePtr, + PlatformWindow, Point, PromptLevel, ResizeEdge, ScaledPixels, Scene, Size, Tiling, + WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowDecorations, WindowKind, + WindowParams, X11ClientStatePtr, }; use blade_graphics as gpu; @@ -1412,7 +1412,7 @@ impl PlatformWindow for X11Window { } } - fn update_ime_position(&self, bounds: Bounds) { + fn update_ime_position(&self, bounds: Bounds) { let mut state = self.0.state.borrow_mut(); let client = state.client.clone(); drop(state); diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index 
bf8369069731f1..885c3565ccc899 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -3,8 +3,9 @@ use crate::{ platform::PlatformInputHandler, point, px, size, AnyWindowHandle, Bounds, DisplayLink, ExternalPaths, FileDropEvent, ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, - PlatformAtlas, PlatformDisplay, PlatformInput, PlatformWindow, Point, PromptLevel, Size, Timer, - WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowKind, WindowParams, + PlatformAtlas, PlatformDisplay, PlatformInput, PlatformWindow, Point, PromptLevel, + ScaledPixels, Size, Timer, WindowAppearance, WindowBackgroundAppearance, WindowBounds, + WindowKind, WindowParams, }; use block::ConcreteBlock; use cocoa::{ @@ -1119,7 +1120,7 @@ impl PlatformWindow for MacWindow { None } - fn update_ime_position(&self, _bounds: Bounds) { + fn update_ime_position(&self, _bounds: Bounds) { unsafe { let input_context: id = msg_send![class!(NSTextInputContext), currentInputContext]; let _: () = msg_send![input_context, invalidateCharacterCoordinates]; diff --git a/crates/gpui/src/platform/test/window.rs b/crates/gpui/src/platform/test/window.rs index 1464dd8e73b384..ebbf6ecc12b466 100644 --- a/crates/gpui/src/platform/test/window.rs +++ b/crates/gpui/src/platform/test/window.rs @@ -1,8 +1,8 @@ use crate::{ AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DispatchEventResult, GPUSpecs, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, - Point, Size, TestPlatform, TileId, WindowAppearance, WindowBackgroundAppearance, WindowBounds, - WindowParams, + Point, ScaledPixels, Size, TestPlatform, TileId, WindowAppearance, WindowBackgroundAppearance, + WindowBounds, WindowParams, }; use collections::HashMap; use parking_lot::Mutex; @@ -274,7 +274,7 @@ impl PlatformWindow for TestWindow { unimplemented!() } - fn 
update_ime_position(&self, _bounds: Bounds) {} + fn update_ime_position(&self, _bounds: Bounds) {} fn gpu_specs(&self) -> Option { None diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index e2cfb38afd4dfc..d5ea3be6cac5e6 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -685,7 +685,7 @@ impl PlatformWindow for WindowsWindow { Some(self.0.state.borrow().renderer.gpu_specs()) } - fn update_ime_position(&self, _bounds: Bounds) { + fn update_ime_position(&self, _bounds: Bounds) { // todo(windows) } } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 57066b0ce64c5e..af968c5a2c1239 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -3610,7 +3610,9 @@ impl<'a> WindowContext<'a> { self.on_next_frame(|cx| { if let Some(mut input_handler) = cx.window.platform_window.take_input_handler() { if let Some(bounds) = input_handler.selected_bounds(cx) { - cx.window.platform_window.update_ime_position(bounds); + cx.window + .platform_window + .update_ime_position(bounds.scale(cx.scale_factor())); } cx.window.platform_window.set_input_handler(input_handler); } From e64a86ce9fdf2ee72cf8cc6572295bf3b372b7e1 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 30 Sep 2024 15:28:46 +0300 Subject: [PATCH 401/762] Fix a typo in the multi buffers documentation (#18535) Closes https://github.com/zed-industries/zed/issues/18533 Release Notes: - N/A --- docs/src/multibuffers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/multibuffers.md b/docs/src/multibuffers.md index 46bfb1d93f7e41..7d9f4cafc4ab3b 100644 --- a/docs/src/multibuffers.md +++ b/docs/src/multibuffers.md @@ -31,7 +31,7 @@ To start a search run the `pane: Toggle Search` command (`cmd-shift-f` on macOS, ## Diagnostics -If you have a language server installed, the diagnostics pane can show you all errors across your project. 
You can open it by clicking on the icon in the status bar, or running the `diagnostcs: Deploy` command` ('cmd-shift-m` on macOS, `ctrl-shift-m` on Windows/Linux, or `:clist` in Vim mode). +If you have a language server installed, the diagnostics pane can show you all errors across your project. You can open it by clicking on the icon in the status bar, or running the `diagnostics: Deploy` command` ('cmd-shift-m` on macOS, `ctrl-shift-m` on Windows/Linux, or `:clist` in Vim mode). ## Find References From 215bce19748652820e01eecff6a1052586f357ae Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Mon, 30 Sep 2024 16:35:36 +0300 Subject: [PATCH 402/762] Make direct direnv loading default (#18536) I've been running with direct direnv loading for a while now and haven't experienced any significant issues other than #18473. Making it default would make direnv integration more reliable and consistent. I've also updated the docs a bit to ensure that they represent current status of direnv integration Release Notes: - Made direnv integration use direct (`direnv export json`) mode by default instead of relying on a shell hook, improving consistency and reliability of direnv detection --- assets/settings/default.json | 10 ++++------ crates/project/src/project_settings.rs | 5 +---- docs/src/configuring-zed.md | 6 ++++-- 3 files changed, 9 insertions(+), 12 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index ba95c2cfcdaaa5..f6c498e0278e6a 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -599,13 +599,11 @@ } }, // Configuration for how direnv configuration should be loaded. May take 2 values: - // 1. Load direnv configuration through the shell hook, works for POSIX shells and fish. - // "load_direnv": "shell_hook" - // 2. Load direnv configuration using `direnv export json` directly. 
- // This can help with some shells that otherwise would not detect - // the direnv environment, such as nushell or elvish. + // 1. Load direnv configuration using `direnv export json` directly. // "load_direnv": "direct" - "load_direnv": "shell_hook", + // 2. Load direnv configuration through the shell hook, works for POSIX shells and fish. + // "load_direnv": "shell_hook" + "load_direnv": "direct", "inline_completions": { // A list of globs representing files that inline completions should be disabled for. "disabled_globs": [".env"] diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 706d3afdce45e2..d794563672ed06 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -62,12 +62,9 @@ pub struct NodeBinarySettings { #[serde(rename_all = "snake_case")] pub enum DirenvSettings { /// Load direnv configuration through a shell hook - #[default] ShellHook, /// Load direnv configuration directly using `direnv export json` - /// - /// Warning: This option is experimental and might cause some inconsistent behavior compared to using the shell hook. - /// If it does, please report it to GitHub + #[default] Direct, } diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 18d66708ad7fc8..ad6a628ed0880f 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -267,12 +267,14 @@ left and right padding of the central pane from the workspace when the centered ## Direnv Integration -- Description: Settings for [direnv](https://direnv.net/) integration. Requires `direnv` to be installed. `direnv` integration currently only means that the environment variables set by a `direnv` configuration can be used to detect some language servers in `$PATH` instead of installing them. +- Description: Settings for [direnv](https://direnv.net/) integration. Requires `direnv` to be installed. 
+ `direnv` integration make it possible to use the environment variables set by a `direnv` configuration to detect some language servers in `$PATH` instead of installing them. + It also allows for those environment variables to be used in tasks. - Setting: `load_direnv` - Default: ```json -"load_direnv": "shell_hook" +"load_direnv": "direct" ``` **Options** From 69e698c3be73b6e31605abd317b835f63670172c Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 30 Sep 2024 15:36:35 +0200 Subject: [PATCH 403/762] terminal: Fix blinking settings & blinking with custom shape (#18538) This is a follow-up to #18530 thanks to this comment here: https://github.com/zed-industries/zed/pull/18530#issuecomment-2382870564 In short: it fixes the `blinking` setting and the `cursor_shape` setting as it relates to blinking. Turns out our `blinking` setting was always the wrong value when using `terminal_controlled` and the terminal _would_ control the blinking. Example script to test with: ```bash echo -e "0 normal \x1b[\x30 q"; sleep 2 echo -e "1 blink block \x1b[\x31 q"; sleep 2 echo -e "2 solid block \x1b[\x32 q"; sleep 2 echo -e "3 blink under \x1b[\x33 q"; sleep 2 echo -e "4 solid under \x1b[\x34 q"; sleep 2 echo -e "5 blink vert \x1b[\x35 q"; sleep 2 echo -e "6 solid vert \x1b[\x36 q"; sleep 2 echo -e "0 normal \x1b[\x30 q"; sleep 2 echo -e "color \x1b]12;#00ff00\x1b\\"; sleep 2 echo -e "reset \x1b]112\x1b\\ \x1b[\x30 q" ``` Before the changes in here, this script would set the cursor shape and the blinking, but the blinking boolean would always be wrong. This change here makes sure that it works consistently: - `terminal.cursor_shape` only controls the *default* shape of the terminal, not the blinking. 
- `terminal.blinking = on` means that it's *always* blinking, regardless of what terminal programs want - `terminal.blinking = off` means that it's *never* blinking, regardless of what terminal programs want - `terminal.blinking = terminal_controlled (default)` means that it's blinking depending on what terminal programs want. When a terminal program resets the cursor to default, it sets it back to `terminal.cursor_shape` if that is set. Release Notes: - Fixed the behavior of `{"terminal": {"blinking": "[on|off|terminal_controlled]"}}` to work correctly, including when a custom `cursor_shape` is set. - `terminal.cursor_shape` only controls the *default* shape of the terminal, not the blinking. - `terminal.blinking = on` means that it's *always* blinking, regardless of what terminal programs want - `terminal.blinking = off` means that it's *never* blinking, regardless of what terminal programs want - `terminal.blinking = terminal_controlled (default)` means that it's blinking depending on what terminal programs want. When a terminal program resets the cursor to default, it sets it back to `terminal.cursor_shape` if that is set. 
Demo: https://github.com/user-attachments/assets/b3fbeafd-ad58-41c8-9c07-1f03bc31771f Co-authored-by: Bennet --- crates/project/src/terminals.rs | 1 - crates/terminal/src/terminal.rs | 21 ++++++++++----------- crates/terminal_view/src/terminal_view.rs | 19 ++++++++++++++----- 3 files changed, 24 insertions(+), 17 deletions(-) diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index ababb3261b3376..54dd48cf433ff3 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -215,7 +215,6 @@ impl Project { spawn_task, shell, env, - Some(settings.blinking), settings.cursor_shape.unwrap_or_default(), settings.alternate_scroll, settings.max_scroll_history_lines, diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index f9767b07d12612..b51308df378845 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -42,7 +42,7 @@ use serde::{Deserialize, Serialize}; use settings::Settings; use smol::channel::{Receiver, Sender}; use task::{HideStrategy, Shell, TaskId}; -use terminal_settings::{AlternateScroll, CursorShape, TerminalBlink, TerminalSettings}; +use terminal_settings::{AlternateScroll, CursorShape, TerminalSettings}; use theme::{ActiveTheme, Theme}; use util::truncate_and_trailoff; @@ -102,7 +102,7 @@ pub enum Event { CloseTerminal, Bell, Wakeup, - BlinkChanged, + BlinkChanged(bool), SelectionsChanged, NewNavigationTarget(Option), Open(MaybeNavigationTarget), @@ -315,7 +315,6 @@ impl TerminalBuilder { task: Option, shell: Shell, mut env: HashMap, - blink_settings: Option, cursor_shape: CursorShape, alternate_scroll: AlternateScroll, max_scroll_history_lines: Option, @@ -378,16 +377,11 @@ impl TerminalBuilder { let (events_tx, events_rx) = unbounded(); //Set up the terminal... 
let mut term = Term::new( - config, + config.clone(), &TerminalSize::default(), ZedListener(events_tx.clone()), ); - //Start off blinking if we need to - if let Some(TerminalBlink::On) = blink_settings { - term.set_private_mode(PrivateMode::Named(NamedPrivateMode::BlinkingCursor)); - } - //Alacritty defaults to alternate scrolling being on, so we just need to turn it off. if let AlternateScroll::Off = alternate_scroll { term.unset_private_mode(PrivateMode::Named(NamedPrivateMode::AlternateScroll)); @@ -437,6 +431,7 @@ impl TerminalBuilder { pty_tx: Notifier(pty_tx), completion_tx, term, + term_config: config, events: VecDeque::with_capacity(10), //Should never get this high. last_content: Default::default(), last_mouse: None, @@ -588,6 +583,7 @@ pub struct Terminal { pty_tx: Notifier, completion_tx: Sender<()>, term: Arc>>, + term_config: Config, events: VecDeque, /// This is only used for mouse mode cell change detection last_mouse: Option<(AlacPoint, AlacDirection)>, @@ -672,7 +668,9 @@ impl Terminal { self.write_to_pty(format(self.last_content.size.into())) } AlacTermEvent::CursorBlinkingChange => { - cx.emit(Event::BlinkChanged); + let terminal = self.term.lock(); + let blinking = terminal.cursor_style().blinking; + cx.emit(Event::BlinkChanged(blinking)); } AlacTermEvent::Bell => { cx.emit(Event::Bell); @@ -957,7 +955,8 @@ impl Terminal { } pub fn set_cursor_shape(&mut self, cursor_shape: CursorShape) { - self.term.lock().set_cursor_style(Some(cursor_shape.into())); + self.term_config.default_cursor_style = cursor_shape.into(); + self.term.lock().set_options(self.term_config.clone()); } pub fn total_lines(&self) -> usize { diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index f7b38e3f5ca3e4..ce65be30c6d4f2 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -104,7 +104,7 @@ pub struct TerminalView { context_menu: Option<(View, gpui::Point, Subscription)>, 
cursor_shape: CursorShape, blink_state: bool, - blinking_on: bool, + blinking_terminal_enabled: bool, blinking_paused: bool, blink_epoch: usize, can_navigate_to_selected_word: bool, @@ -184,7 +184,7 @@ impl TerminalView { context_menu: None, cursor_shape, blink_state: true, - blinking_on: false, + blinking_terminal_enabled: false, blinking_paused: false, blink_epoch: 0, can_navigate_to_selected_word: false, @@ -434,7 +434,6 @@ impl TerminalView { pub fn should_show_cursor(&self, focused: bool, cx: &mut gpui::ViewContext) -> bool { //Don't blink the cursor when not focused, blinking is disabled, or paused if !focused - || !self.blinking_on || self.blinking_paused || self .terminal @@ -450,7 +449,10 @@ impl TerminalView { //If the user requested to never blink, don't blink it. TerminalBlink::Off => true, //If the terminal is controlling it, check terminal mode - TerminalBlink::TerminalControlled | TerminalBlink::On => self.blink_state, + TerminalBlink::TerminalControlled => { + !self.blinking_terminal_enabled || self.blink_state + } + TerminalBlink::On => self.blink_state, } } @@ -642,7 +644,14 @@ fn subscribe_for_terminal_events( cx.emit(Event::Wakeup); } - Event::BlinkChanged => this.blinking_on = !this.blinking_on, + Event::BlinkChanged(blinking) => { + if matches!( + TerminalSettings::get_global(cx).blinking, + TerminalBlink::TerminalControlled + ) { + this.blinking_terminal_enabled = *blinking; + } + } Event::TitleChanged => { cx.emit(ItemEvent::UpdateTab); From 053e31994fd5ffc99cd0a59404fb5c4fac4fd3c5 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 30 Sep 2024 18:33:20 +0200 Subject: [PATCH 404/762] Fine-tune hunk controls block (#18543) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR changes the undo icon and adds a background color so that indent lines don't bleed through the control block. 
Screenshot 2024-09-30 at 5 38 44 PM Release Notes: - N/A --- crates/editor/src/hunk_diff.rs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 4e7a0f05f4a25b..2ccd60c668de7f 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -360,8 +360,11 @@ impl Editor { h_flex() .id(cx.block_id) - .w_full() .h(cx.line_height()) + .w_full() + .border_t_1() + .border_color(border_color) + .bg(cx.theme().colors().editor_background) .child( div() .id("gutter-strip") @@ -381,12 +384,10 @@ impl Editor { ) .child( h_flex() - .pl_1p5() + .pl_2() .pr_6() .size_full() .justify_between() - .border_t_1() - .border_color(border_color) .child( h_flex() .gap_1() @@ -513,7 +514,7 @@ impl Editor { }), ) .child( - IconButton::new("discard", IconName::RotateCcw) + IconButton::new("discard", IconName::Undo) .shape(IconButtonShape::Square) .icon_size(IconSize::Small) .tooltip({ From 74cba2407f3c5999ab27597dde1643e7e748db8e Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 16:14:26 -0400 Subject: [PATCH 405/762] ci: Move collab to Dockerfile-collab (#18515) This makes it possible to have multiple Dockerfiles, each with their own `.dockerignore`. Previously any docker builds would always include anything inside `.dockerignore`. I believe this feature may require `export DOCKER_BUILDKIT=1` but we use that in CI already. 
--- .github/workflows/deploy_collab.yml | 6 +++++- .zed/settings.json | 4 ++++ Dockerfile => Dockerfile-collab | 0 .dockerignore => Dockerfile-collab.dockerignore | 0 4 files changed, 9 insertions(+), 1 deletion(-) rename Dockerfile => Dockerfile-collab (100%) rename .dockerignore => Dockerfile-collab.dockerignore (100%) diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index c4193adcd24349..1e6e6cf2803e20 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -76,7 +76,11 @@ jobs: clean: false - name: Build docker image - run: docker build . --build-arg GITHUB_SHA=$GITHUB_SHA --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA + run: | + docker build -f Dockerfile-collab \ + --build-arg GITHUB_SHA=$GITHUB_SHA \ + --tag registry.digitalocean.com/zed/collab:$GITHUB_SHA \ + . - name: Publish docker image run: docker push registry.digitalocean.com/zed/collab:${GITHUB_SHA} diff --git a/.zed/settings.json b/.zed/settings.json index 176fd33a9b966d..41adfdbf591d36 100644 --- a/.zed/settings.json +++ b/.zed/settings.json @@ -38,6 +38,10 @@ } } }, + "file_types": { + "Dockerfile": ["Dockerfile*[!dockerignore]"], + "Git Ignore": ["dockerignore"] + }, "hard_tabs": false, "formatter": "auto", "remove_trailing_whitespace_on_save": true, diff --git a/Dockerfile b/Dockerfile-collab similarity index 100% rename from Dockerfile rename to Dockerfile-collab diff --git a/.dockerignore b/Dockerfile-collab.dockerignore similarity index 100% rename from .dockerignore rename to Dockerfile-collab.dockerignore From 09424edc3510b3b6004f93f7c7f3c5e173a4c5fa Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 16:17:21 -0400 Subject: [PATCH 406/762] ci: Add script/determine-release-channel (#18476) - Refactor duplicated inline script from ci.yml to `script/determine-release-channel` - Remove references to non-existent '-nightly' release tags Release Notes: - N/A --- .github/workflows/ci.yml | 79 
++++---------------------------- script/bump-zed-patch-version | 5 +- script/determine-release-channel | 32 +++++++++++++ 3 files changed, 43 insertions(+), 73 deletions(-) create mode 100755 script/determine-release-channel diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 07e5499d5eb76c..ca0d8bb9100fd8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -192,29 +192,12 @@ jobs: - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | - set -eu - - version=$(script/get-crate-version zed) - channel=$(cat crates/zed/RELEASE_CHANNEL) - echo "Publishing version: ${version} on release channel ${channel}" - echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV - - expected_tag_name="" - case ${channel} in - stable) - expected_tag_name="v${version}";; - preview) - expected_tag_name="v${version}-pre";; - nightly) - expected_tag_name="v${version}-nightly";; - *) - echo "can't publish a release on channel ${channel}" - exit 1;; - esac - if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then - echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}" - exit 1 - fi + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel + + - name: Draft release notes + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + run: | mkdir -p target/ # Ignore any errors that occur while drafting release notes to not fail the build. 
script/draft-release-notes "$version" "$channel" > target/release-notes.md || true @@ -289,29 +272,8 @@ jobs: - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | - set -eu - - version=$(script/get-crate-version zed) - channel=$(cat crates/zed/RELEASE_CHANNEL) - echo "Publishing version: ${version} on release channel ${channel}" - echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV - - expected_tag_name="" - case ${channel} in - stable) - expected_tag_name="v${version}";; - preview) - expected_tag_name="v${version}-pre";; - nightly) - expected_tag_name="v${version}-nightly";; - *) - echo "can't publish a release on channel ${channel}" - exit 1;; - esac - if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then - echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}" - exit 1 - fi + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel - name: Create Linux .tar.gz bundle run: script/bundle-linux @@ -357,29 +319,8 @@ jobs: - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} run: | - set -eu - - version=$(script/get-crate-version zed) - channel=$(cat crates/zed/RELEASE_CHANNEL) - echo "Publishing version: ${version} on release channel ${channel}" - echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV - - expected_tag_name="" - case ${channel} in - stable) - expected_tag_name="v${version}";; - preview) - expected_tag_name="v${version}-pre";; - nightly) - expected_tag_name="v${version}-nightly";; - *) - echo "can't publish a release on channel ${channel}" - exit 1;; - esac - if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then - echo "invalid release tag ${GITHUB_REF_NAME}. 
expected ${expected_tag_name}" - exit 1 - fi + # This exports RELEASE_CHANNEL into env (GITHUB_ENV) + script/determine-release-channel - name: Create and upload Linux .tar.gz bundle run: script/bundle-linux diff --git a/script/bump-zed-patch-version b/script/bump-zed-patch-version index e00e747aa32dac..b52feff72f1cb4 100755 --- a/script/bump-zed-patch-version +++ b/script/bump-zed-patch-version @@ -9,11 +9,8 @@ case $channel in preview) tag_suffix="-pre" ;; - nightly) - tag_suffix="-nightly" - ;; *) - echo "this must be run on either of stable|preview|nightly release branches" >&2 + echo "this must be run on either of stable|preview release branches" >&2 exit 1 ;; esac diff --git a/script/determine-release-channel b/script/determine-release-channel new file mode 100755 index 00000000000000..ae49fbf1e5d076 --- /dev/null +++ b/script/determine-release-channel @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [ -z "${GITHUB_ACTIONS-}" ]; then + echo "Error: This script must be run in a GitHub Actions environment" + exit 1 +elif [ -z "${GITHUB_REF-}" ]; then + # This should be the release tag 'v0.x.x' + echo "Error: GITHUB_REF is not set" + exit 1 +fi + +version=$(script/get-crate-version zed) +channel=$(cat crates/zed/RELEASE_CHANNEL) +echo "Publishing version: ${version} on release channel ${channel}" +echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV + +expected_tag_name="" +case ${channel} in +stable) + expected_tag_name="v${version}";; +preview) + expected_tag_name="v${version}-pre";; +*) + echo "can't publish a release on channel ${channel}" + exit 1;; +esac +if [[ $GITHUB_REF_NAME != $expected_tag_name ]]; then + echo "invalid release tag ${GITHUB_REF_NAME}. 
expected ${expected_tag_name}" + exit 1 +fi From 432de00e894e6a104fa69e9146081b159de2dc43 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 17:02:19 -0400 Subject: [PATCH 407/762] ci: Use BuildJet Ubuntu 20.04 runners for better glibc compatibility (#18442) Use BuildJet Ubuntu 20.04 runners. - Linux arm64 unchanged (glibc >= 2.35) - Linux x64 glibc requirement becomes to >= 2.31 (from glibc >= 2.35). Note: Ubuntu 20.04 repo cmake (3.16.3) is normally too old to build Zed, but `ubuntu-2004` [includes cmake 3.30.3](https://github.com/actions/runner-images/blob/main/images/ubuntu/Ubuntu2004-Readme.md#tools). --- .github/workflows/ci.yml | 4 +-- .github/workflows/release_nightly.yml | 4 +-- docs/src/linux.md | 6 ++--- script/install-mold | 37 +++++++++++++++++++++++++++ script/linux | 14 ++++++++-- 5 files changed, 56 insertions(+), 9 deletions(-) create mode 100755 script/install-mold diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ca0d8bb9100fd8..ef1570fbf2121a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -254,7 +254,7 @@ jobs: timeout-minutes: 60 name: Create a Linux bundle runs-on: - - buildjet-16vcpu-ubuntu-2204 + - buildjet-16vcpu-ubuntu-2004 if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} needs: [linux_tests] env: @@ -267,7 +267,7 @@ jobs: clean: false - name: Install Linux dependencies - run: ./script/linux + run: ./script/linux && ./script/install-mold 2.34.0 - name: Determine version and release channel if: ${{ startsWith(github.ref, 'refs/tags/v') }} diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 4e8a257bdd1bb8..349d14f990fcbf 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -100,7 +100,7 @@ jobs: name: Create a Linux *.tar.gz bundle for x86 if: github.repository_owner == 'zed-industries' runs-on: - - 
buildjet-16vcpu-ubuntu-2204 + - buildjet-16vcpu-ubuntu-2004 needs: tests env: DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} @@ -117,7 +117,7 @@ jobs: run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH - name: Install Linux dependencies - run: ./script/linux + run: ./script/linux && ./script/install-mold 2.34.0 - name: Limit target directory size run: script/clear-target-dir-if-larger-than 100 diff --git a/docs/src/linux.md b/docs/src/linux.md index 17334c325ca309..3bba9c8f93aaae 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -16,7 +16,7 @@ The Zed installed by the script works best on systems that: - have a Vulkan compatible GPU available (for example Linux on an M-series macBook) - have a system-wide glibc (NixOS and Alpine do not by default) - - x86_64 (Intel/AMD): glibc version >= 2.35 (Ubuntu 22 and newer) + - x86_64 (Intel/AMD): glibc version >= 2.29 (Ubuntu 20 and newer) - aarch64 (ARM): glibc version >= 2.35 (Ubuntu 22 and newer) Both Nix and Alpine have third-party Zed packages available (though they are currently a few weeks out of date). If you'd like to use our builds they do work if you install a glibc compatibility layer. On NixOS you can try [nix-ld](https://github.com/Mic92/nix-ld), and on Alpine [gcompat](https://wiki.alpinelinux.org/wiki/Running_glibc_programs). 
@@ -24,8 +24,8 @@ Both Nix and Alpine have third-party Zed packages available (though they are cur You will need to build from source for: - architectures other than 64-bit Intel or 64-bit ARM (for example a 32-bit or RISC-V machine) -- Amazon Linux -- Rocky Linux 9.3 +- Redhat Enterprise Linux 8.x, Rocky Linux 8, AlmaLinux 8, Amazon Linux 2 on all architectures +- Redhat Enterprise Linux 9.x, Rocky Linux 9.3, AlmaLinux 8, Amazon Linux 2023 on aarch64 (x86_x64 OK) ## Other ways to install Zed on Linux diff --git a/script/install-mold b/script/install-mold new file mode 100755 index 00000000000000..1c451b72690614 --- /dev/null +++ b/script/install-mold @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Install `mold` official binaries from GitHub Releases. +# +# Adapted from the official rui314/setup-mold@v1 action to: +# * use environment variables instead of action inputs +# * remove make-default support +# * use curl instead of wget +# * support doas for sudo +# * support redhat systems +# See: https://github.com/rui314/setup-mold/blob/main/action.yml + +set -euo pipefail + +MOLD_VERSION="${MOLD_VERSION:-${1:-}}" +if [ "$(uname -s)" != "Linux" ]; then + echo "Error: This script is intended for Linux systems only." + exit 1 +elif [ -z "$MOLD_VERSION" ]; then + echo "Usage: $0 2.34.0" + exit 1 +elif [ -e /usr/local/bin/mold ]; then + echo "Warning: existing mold found at /usr/local/bin/mold. Skipping installation." 
+ exit 0 +fi + +if [ "$(whoami)" = root ]; then SUDO=; else SUDO="$(command -v sudo || command -v doas || true)"; fi + +MOLD_REPO="${MOLD_REPO:-https://github.com/rui314/mold}" +MOLD_URL="${MOLD_URL:-$MOLD_REPO}/releases/download/v$MOLD_VERSION/mold-$MOLD_VERSION-$(uname -m)-linux.tar.gz" + +echo "Downloading from $MOLD_URL" +curl --location --show-error --output - --retry 3 --retry-delay 5 "$MOLD_URL" \ + | $SUDO tar -C /usr/local --strip-components=1 --no-overwrite-dir -xzf - + +# Note this binary depends on the system libatomic.so.1 which is usually +# provided as a dependency of gcc so it should be available on most systems. diff --git a/script/linux b/script/linux index eca3bf7f7d2497..189407a5107b86 100755 --- a/script/linux +++ b/script/linux @@ -20,19 +20,29 @@ if [[ -n $apt ]]; then libwayland-dev libxkbcommon-x11-dev libssl-dev - libstdc++-12-dev libzstd-dev libvulkan1 libgit2-dev make cmake clang - mold jq gettext-base elfutils libsqlite3-dev ) + # Ubuntu 20.04 / Debian Bullseye (including CI for release) + if grep -q "bullseye" /etc/debian_version; then + deps+=( + libstdc++-10-dev + ) + else + deps+=( + libstdc++-12-dev + mold + ) + fi + $maysudo "$apt" update $maysudo "$apt" install -y "${deps[@]}" exit 0 From 3010dfe038dce73a4d5b809b6d0949e723c76835 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 17:46:21 -0400 Subject: [PATCH 408/762] Support More Linux (#18480) - Add `script/build-docker` - Add `script/install-cmake` - Add `script/install-mold` - Improve `script/linux` - Add missing dependencies: `jq`, `git`, `tar`, `gzip` as required. - Add check for mold - Fix Redhat 8.x derivatives (RHEL, Centos, Almalinux, Rocky, Oracle, Amazon) - Fix perl libs to be Fedora only - Install the best `libstdc++` available on apt distros - ArchLinux: run `pacman -Syu` to update repos before installing. 
- Should work on Raspbian (untested) This make it possible to test builds on other distros using docker: ``` ./script/build-docker amazonlinux:2023 ``` --- Dockerfile-distros | 26 +++++++ Dockerfile-distros.dockerignore | 2 + docs/src/development/macos.md | 6 ++ script/build-docker | 25 +++++++ script/install-cmake | 77 +++++++++++++++++++++ script/install-mold | 2 +- script/linux | 116 +++++++++++++++++++++++--------- 7 files changed, 220 insertions(+), 34 deletions(-) create mode 100644 Dockerfile-distros create mode 100644 Dockerfile-distros.dockerignore create mode 100755 script/build-docker create mode 100755 script/install-cmake diff --git a/Dockerfile-distros b/Dockerfile-distros new file mode 100644 index 00000000000000..c8a98d2f7db9bd --- /dev/null +++ b/Dockerfile-distros @@ -0,0 +1,26 @@ +# syntax=docker/dockerfile:1 + +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +WORKDIR /app +ARG TZ=Etc/UTC \ + LANG=C.UTF-8 \ + LC_ALL=C.UTF-8 \ + DEBIAN_FRONTEND=noninteractive +ENV CARGO_TERM_COLOR=always + +COPY script/linux script/ +RUN ./script/linux +COPY script/install-mold script/install-cmake script/ +RUN ./script/install-mold "2.34.0" +RUN ./script/install-cmake "3.30.4" + +COPY . . + +# When debugging, make these into individual RUN statements. +# Cleanup to avoid saving big layers we aren't going to use. +RUN . "$HOME/.cargo/env" \ + && cargo fetch \ + && cargo build \ + && cargo run -- --help \ + && cargo clean --quiet diff --git a/Dockerfile-distros.dockerignore b/Dockerfile-distros.dockerignore new file mode 100644 index 00000000000000..de70e0d16772e2 --- /dev/null +++ b/Dockerfile-distros.dockerignore @@ -0,0 +1,2 @@ +**/target +**/node_modules diff --git a/docs/src/development/macos.md b/docs/src/development/macos.md index 1407b0f610dc68..c5cbf56e15f1a3 100644 --- a/docs/src/development/macos.md +++ b/docs/src/development/macos.md @@ -35,6 +35,12 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). 
brew install cmake ``` +- (Optional) Install `mold` to speed up link times + + ```sh + brew install mold + ``` + ## Backend Dependencies If you are developing collaborative features of Zed, you'll need to install the dependencies of zed's `collab` server: diff --git a/script/build-docker b/script/build-docker new file mode 100755 index 00000000000000..c5ea294c734200 --- /dev/null +++ b/script/build-docker @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +# Use a docker BASE_IMAGE to test building Zed. +# e.g: ./script/bundle-docker ubuntu:20.04 +# +# Increasing resources available to podman may speed this up: +# podman machine stop +# podman machine set --memory 16384 --cpus 8 --disk-size 200 +# podman machine start + +set -euo pipefail + +BASE_IMAGE=${BASE_IMAGE:-${1:-}} +if [ -z "$BASE_IMAGE" ]; then + echo "Usage: $0 BASE_IMAGE" >&2 + exit 1 +fi + +export DOCKER_BUILDKIT=1 +cd "$(dirname "$0")/.." + +podman build . \ + -f Dockerfile-distros \ + -t many \ + --build-arg BASE_IMAGE="$BASE_IMAGE" diff --git a/script/install-cmake b/script/install-cmake new file mode 100755 index 00000000000000..71b5aaeeef2ed1 --- /dev/null +++ b/script/install-cmake @@ -0,0 +1,77 @@ +#!/usr/bin/env bash +# +# This script installs an up-to-date version of CMake. +# +# For MacOS use Homebrew to install the latest version. +# +# For Ubuntu use the official KitWare Apt repository with backports. +# See: https://apt.kitware.com/ +# +# For other systems (RHEL 8.x, 9.x, AmazonLinux, SUSE, Fedora, Arch, etc) +# use the official CMake installer script from KitWare. +# +# Note this is similar to how GitHub Actions runners install cmake: +# https://github.com/actions/runner-images/blob/main/images/ubuntu/scripts/build/install-cmake.sh +# +# Upstream: 3.30.4 (2024-09-27) + +set -euo pipefail + + +if [[ "$(uname -s)" == "darwin" ]]; then + brew --version >/dev/null \ + || echo "Error: Homebrew is required to install cmake on MacOS." 
&& exit 1 + echo "Installing cmake via Homebrew (can't pin to old versions)." + brew install cmake + exit 0 +elif [ "$(uname -s)" != "Linux" ]; then + echo "Error: This script is intended for MacOS/Linux systems only." + exit 1 +elif [ -z "${1:-}" ]; then + echo "Usage: $0 [3.30.4]" + exit 1 +fi +CMAKE_VERSION="${CMAKE_VERSION:-${1:-3.30.4}}" + +if [ "$(whoami)" = root ]; then SUDO=; else SUDO="$(command -v sudo || command -v doas || true)"; fi + +if cmake --version | grep -q "$CMAKE_VERSION"; then + echo "CMake $CMAKE_VERSION is already installed." + exit 0 +elif [ -e /usr/local/bin/cmake ]; then + echo "Warning: existing cmake found at /usr/local/bin/cmake. Skipping installation." + exit 0 +elif [ -e /etc/apt/sources.list.d/kitware.list ]; then + echo "Warning: existing KitWare repository found. Skipping installation." + exit 0 +elif [ -e /etc/lsb-release ] && grep -qP 'DISTRIB_ID=Ubuntu' /etc/lsb-release; then + curl -fsSL https://apt.kitware.com/keys/kitware-archive-latest.asc \ + | $SUDO gpg --dearmor - \ + | $SUDO tee /usr/share/keyrings/kitware-archive-keyring.gpg >/dev/null + echo "deb [signed-by=/usr/share/keyrings/kitware-archive-keyring.gpg] https://apt.kitware.com/ubuntu/ $(lsb_release -cs) main" \ + | $SUDO tee /etc/apt/sources.list.d/kitware.list >/dev/null + $SUDO apt-get update + $SUDO apt-get install -y kitware-archive-keyring cmake==$CMAKE_VERSION +else + arch="$(uname -m)" + if [ "$arch" != "x86_64" ] && [ "$arch" != "aarch64" ]; then + echo "Error. Only x86_64 and aarch64 are supported." 
+ exit 1 + fi + tempdir=$(mktemp -d) + pushd "$tempdir" + CMAKE_REPO="https://github.com/Kitware/CMake" + CMAKE_INSTALLER="cmake-$CMAKE_VERSION-linux-$arch.sh" + curl -fsSL --output cmake-$CMAKE_VERSION-SHA-256.txt \ + "$CMAKE_REPO/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-SHA-256.txt" + curl -fsSL --output $CMAKE_INSTALLER \ + "$CMAKE_REPO/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-linux-$arch.sh" + # workaround for old versions of sha256sum not having --ignore-missing + grep -F "cmake-$CMAKE_VERSION-linux-$arch.sh" "cmake-$CMAKE_VERSION-SHA-256.txt" \ + | sha256sum -c \ + | grep -qP "^${CMAKE_INSTALLER}: OK" + chmod +x cmake-$CMAKE_VERSION-linux-$arch.sh + $SUDO ./cmake-$CMAKE_VERSION-linux-$arch.sh --prefix=/usr/local --skip-license + popd + rm -rf "$tempdir" +fi diff --git a/script/install-mold b/script/install-mold index 1c451b72690614..fca8d73f21a10d 100755 --- a/script/install-mold +++ b/script/install-mold @@ -30,7 +30,7 @@ MOLD_REPO="${MOLD_REPO:-https://github.com/rui314/mold}" MOLD_URL="${MOLD_URL:-$MOLD_REPO}/releases/download/v$MOLD_VERSION/mold-$MOLD_VERSION-$(uname -m)-linux.tar.gz" echo "Downloading from $MOLD_URL" -curl --location --show-error --output - --retry 3 --retry-delay 5 "$MOLD_URL" \ +curl -fsSL --output - "$MOLD_URL" \ | $SUDO tar -C /usr/local --strip-components=1 --no-overwrite-dir -xzf - # Note this binary depends on the system libatomic.so.1 which is usually diff --git a/script/linux b/script/linux index 189407a5107b86..540bf063ec92ee 100755 --- a/script/linux +++ b/script/linux @@ -1,15 +1,25 @@ #!/usr/bin/env bash -set -ex +set -xeuo pipefail -# install the wasm toolchain -which rustup > /dev/null 2>&1 || curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +# if root or if sudo/unavailable, define an empty variable +if [ "$(id -u)" -eq 0 ] +then maysudo='' +else maysudo="$(command -v sudo || command -v doas || true)" +fi -# if sudo is not installed, define an empty alias 
-maysudo=$(command -v sudo || command -v doas || true) +function finalize { + # after packages install (curl, etc), get the rust toolchain + which rustup > /dev/null 2>&1 || curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + # verify the mold situation + if ! command -v mold >/dev/null 2>&1; then + echo "Warning: Mold binaries are unavailable on your system." >&2 + echo " Builds will be slower without mold. Try: scripts/install-mold" >&2 + fi + echo "Finished installing Linux dependencies with script/linux" +} -# Ubuntu, Debian, etc. -# https://packages.ubuntu.com/ +# Ubuntu, Debian, Mint, Kali, Pop!_OS, Raspbian, etc. apt=$(command -v apt-get || true) if [[ -n $apt ]]; then deps=( @@ -27,58 +37,88 @@ if [[ -n $apt ]]; then cmake clang jq + git + curl gettext-base elfutils libsqlite3-dev ) - # Ubuntu 20.04 / Debian Bullseye (including CI for release) - if grep -q "bullseye" /etc/debian_version; then - deps+=( - libstdc++-10-dev - ) - else - deps+=( - libstdc++-12-dev - mold - ) + if (grep -qP 'PRETTY_NAME="(.+24\.04)' /etc/os-release); then + deps+=( mold libstdc++-14-dev ) + elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+12|.+22\.04)' /etc/os-release); then + deps+=( mold libstdc++-12-dev ) + elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+11|.+20\.04)' /etc/os-release); then + deps+=( libstdc++-10-dev ) fi $maysudo "$apt" update $maysudo "$apt" install -y "${deps[@]}" + finalize exit 0 fi -# Fedora, CentOS, RHEL, etc. -# https://packages.fedoraproject.org/ +# Fedora, CentOS, RHEL, Alma, Amazon 2023, Oracle, etc. dnf=$(command -v dnf || true) -if [[ -n $dnf ]]; then +# Old Redhat (yum only): Amazon Linux 2, Oracle Linux 7, etc. 
+yum=$(command -v yum || true) + +if [[ -n $dnf ]] || [[ -n $yum ]]; then + pkg_cmd="${dnf:-${yum}}" deps=( gcc - g++ clang cmake - mold alsa-lib-devel fontconfig-devel wayland-devel libxkbcommon-x11-devel openssl-devel libzstd-devel - # Perl dependencies are needed for openssl-sys crate see https://docs.rs/openssl/latest/openssl/ - perl-FindBin - perl-IPC-Cmd - perl-File-Compare - perl-File-Copy vulkan-loader sqlite-devel + jq + git + tar ) + # perl used for building openssl-sys crate. See: https://docs.rs/openssl/latest/openssl/ + if grep -qP '^ID="(fedora)' /etc/os-release; then + deps+=( + perl-FindBin + perl-IPC-Cmd + perl-File-Compare + perl-File-Copy + mold + ) + elif grep grep -qP '^ID="(rhel|rocky|alma|centos|ol)' /etc/os-release; then + deps+=( perl-interpreter ) + fi - # libxkbcommon-x11-devel is in the crb repo on RHEL and CentOS, not needed for Fedora - if ! grep -q "Fedora" /etc/redhat-release; then - $maysudo "$dnf" config-manager --set-enabled crb + # gcc-c++ is g++ on RHEL8 and 8.x clones + if grep -qP '^ID="(rhel|rocky|alma|centos|ol)' /etc/os-release \ + && grep -qP '^VERSION_ID="8' /etc/os-release; then + deps+=( gcc-c++ ) + else + deps+=( g++ ) + fi + + # libxkbcommon-x11-devel is in a non-default repo on RHEL 8.x/9.x (except on AmazonLinux) + if grep -qP '^VERSION_ID="(8|9)' && grep -qP '^ID="(rhel|rocky|centos|alma|ol)' /etc/os-release; then + $maysudo dnf install -y 'dnf-command(config-manager)' + if grep -qP '^PRETTY_NAME="(AlmaLinux 8|Rocky Linux 8)' /etc/os-release; then + $maysudo dnf config-manager --set-enabled powertools + elif grep -qP '^PRETTY_NAME="((AlmaLinux|Rocky|CentOS Stream) 9|Red Hat.+(8|9))' /etc/os-release; then + $maysudo dnf config-manager --set-enabled crb + elif grep -qP '^PRETTY_NAME="Oracle Linux Server 8' /etc/os-release; then + $maysudo dnf config-manager --set-enabled ol8_codeready_builder + elif grep -qP '^PRETTY_NAME="Oracle Linux Server 9' /etc/os-release; then + $maysudo dnf config-manager --set-enabled 
ol9_codeready_builder + else + echo "Unexpected distro" && grep 'PRETTY_NAME' /etc/os-release && exit 1 + fi fi - $maysudo "$dnf" install -y "${deps[@]}" + $maysudo $pkg_cmd install -y "${deps[@]}" + finalize exit 0 fi @@ -99,10 +139,14 @@ if [[ -n $zyp ]]; then openssl-devel libzstd-devel libvulkan1 - mold sqlite3-devel + jq + git + tar + gzip ) $maysudo "$zyp" install -y "${deps[@]}" + finalize exit 0 fi @@ -125,8 +169,10 @@ if [[ -n $pacman ]]; then mold sqlite jq + git ) - $maysudo "$pacman" -S --needed --noconfirm "${deps[@]}" + $maysudo "$pacman" -Syu --needed --noconfirm "${deps[@]}" + finalize exit 0 fi @@ -153,6 +199,7 @@ if [[ -n $xbps ]]; then sqlite-devel ) $maysudo "$xbps" -Syu "${deps[@]}" + finalize exit 0 fi @@ -162,6 +209,7 @@ emerge=$(command -v emerge || true) if [[ -n $emerge ]]; then deps=( app-arch/zstd + app-misc/jq dev-libs/openssl dev-libs/wayland dev-util/cmake @@ -174,7 +222,9 @@ if [[ -n $emerge ]]; then dev-db/sqlite ) $maysudo "$emerge" -u "${deps[@]}" + finalize exit 0 fi echo "Unsupported Linux distribution in script/linux" +exit 1 From eb9fd62a90d9b8825e7280f1e27775f6892c3924 Mon Sep 17 00:00:00 2001 From: Andrey Arutiunian <110744283+andarut@users.noreply.github.com> Date: Tue, 1 Oct 2024 01:50:30 +0300 Subject: [PATCH 409/762] Fix rendering of markdown tables (#18315) - Closes: https://github.com/zed-industries/zed/issues/11024 ## Release Notes: - Improved Markdown Preview rendering of tables ## Before: ![image](https://github.com/user-attachments/assets/25f05604-38a9-4bde-901c-6d53a5d9d94d) Screenshot 2024-09-25 at 05 47 19 ## Now: ![image](https://github.com/user-attachments/assets/ce06f045-d0db-4b8c-a1fc-2811d35f2683) Screenshot 2024-09-25 at 05 47 48 ## Note: I'm not a Rust programmer and this is my first PR in Zed (because i just want to fix this, so i can view my notes in Markdown in Zed, not slow Visual Studio Code) - so there may be errors. 
I'm open for critic a --- .../markdown_preview/src/markdown_renderer.rs | 56 ++++++++++++++++--- 1 file changed, 48 insertions(+), 8 deletions(-) diff --git a/crates/markdown_preview/src/markdown_renderer.rs b/crates/markdown_preview/src/markdown_renderer.rs index ad169f036b9cec..8bab51b2c3f213 100644 --- a/crates/markdown_preview/src/markdown_renderer.rs +++ b/crates/markdown_preview/src/markdown_renderer.rs @@ -6,8 +6,8 @@ use crate::markdown_elements::{ }; use gpui::{ div, px, rems, AbsoluteLength, AnyElement, DefiniteLength, Div, Element, ElementId, - HighlightStyle, Hsla, InteractiveText, IntoElement, Keystroke, Modifiers, ParentElement, - SharedString, Styled, StyledText, TextStyle, WeakView, WindowContext, + HighlightStyle, Hsla, InteractiveText, IntoElement, Keystroke, Length, Modifiers, + ParentElement, SharedString, Styled, StyledText, TextStyle, WeakView, WindowContext, }; use settings::Settings; use std::{ @@ -16,7 +16,7 @@ use std::{ }; use theme::{ActiveTheme, SyntaxTheme, ThemeSettings}; use ui::{ - h_flex, v_flex, Checkbox, FluentBuilder, InteractiveElement, LinkPreview, Selection, + h_flex, relative, v_flex, Checkbox, FluentBuilder, InteractiveElement, LinkPreview, Selection, StatefulInteractiveElement, Tooltip, }; use workspace::Workspace; @@ -231,12 +231,48 @@ fn render_markdown_list_item( } fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) -> AnyElement { - let header = render_markdown_table_row(&parsed.header, &parsed.column_alignments, true, cx); + let mut max_lengths: Vec = vec![0; parsed.header.children.len()]; + + for (index, cell) in parsed.header.children.iter().enumerate() { + let length = cell.contents.len(); + max_lengths[index] = length; + } + + for row in &parsed.body { + for (index, cell) in row.children.iter().enumerate() { + let length = cell.contents.len(); + if length > max_lengths[index] { + max_lengths[index] = length; + } + } + } + + let total_max_length: usize = max_lengths.iter().sum(); + let 
max_column_widths: Vec = max_lengths + .iter() + .map(|&length| length as f32 / total_max_length as f32) + .collect(); + + let header = render_markdown_table_row( + &parsed.header, + &parsed.column_alignments, + &max_column_widths, + true, + cx, + ); let body: Vec = parsed .body .iter() - .map(|row| render_markdown_table_row(row, &parsed.column_alignments, false, cx)) + .map(|row| { + render_markdown_table_row( + row, + &parsed.column_alignments, + &max_column_widths, + false, + cx, + ) + }) .collect(); cx.with_common_p(v_flex()) @@ -249,14 +285,15 @@ fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) - fn render_markdown_table_row( parsed: &ParsedMarkdownTableRow, alignments: &Vec, + max_column_widths: &Vec, is_header: bool, cx: &mut RenderContext, ) -> AnyElement { let mut items = vec![]; - for cell in &parsed.children { + for (index, cell) in parsed.children.iter().enumerate() { let alignment = alignments - .get(items.len()) + .get(index) .copied() .unwrap_or(ParsedMarkdownTableAlignment::None); @@ -268,8 +305,11 @@ fn render_markdown_table_row( ParsedMarkdownTableAlignment::Right => v_flex().items_end(), }; + let max_width = max_column_widths.get(index).unwrap_or(&0.0); + let mut cell = container - .w_full() + .w(Length::Definite(relative(*max_width))) + .h_full() .child(contents) .px_2() .py_1() From 837756198f91e8b33238a0d186d9a4685e4d5b08 Mon Sep 17 00:00:00 2001 From: maan2003 <49202620+maan2003@users.noreply.github.com> Date: Tue, 1 Oct 2024 04:55:32 +0530 Subject: [PATCH 410/762] linux/wayland: Add support for pasting images (#17671) Release Notes: - You can now paste images into the Assistant Panel to include them as context on Linux wayland --- crates/gpui/src/platform/linux/platform.rs | 14 +- .../gpui/src/platform/linux/wayland/client.rs | 9 +- .../src/platform/linux/wayland/clipboard.rs | 148 ++++++++++++------ 3 files changed, 106 insertions(+), 65 deletions(-) diff --git a/crates/gpui/src/platform/linux/platform.rs 
b/crates/gpui/src/platform/linux/platform.rs index a0bd6b1d33d176..67f1a43cbe3220 100644 --- a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -603,17 +603,11 @@ pub(super) fn get_xkb_compose_state(cx: &xkb::Context) -> Option Result { +pub(super) unsafe fn read_fd(mut fd: FileDescriptor) -> Result> { let mut file = File::from_raw_fd(fd.as_raw_fd()); - - let mut buffer = String::new(); - file.read_to_string(&mut buffer)?; - - // Normalize the text to unix line endings, otherwise - // copying from eg: firefox inserts a lot of blank - // lines, and that is super annoying. - let result = buffer.replace("\r\n", "\n"); - Ok(result) + let mut buffer = Vec::new(); + file.read_to_end(&mut buffer)?; + Ok(buffer) } impl CursorStyle { diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index ba4971b63af6d2..f7ade828615f4f 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -1799,10 +1799,11 @@ impl Dispatch for WaylandClientStatePtr { let fd = pipe.read; drop(pipe.write); - let read_task = state - .common - .background_executor - .spawn(async { unsafe { read_fd(fd) } }); + let read_task = state.common.background_executor.spawn(async { + let buffer = unsafe { read_fd(fd)? 
}; + let text = String::from_utf8(buffer)?; + anyhow::Ok(text) + }); let this = this.clone(); state diff --git a/crates/gpui/src/platform/linux/wayland/clipboard.rs b/crates/gpui/src/platform/linux/wayland/clipboard.rs index 26b5256bddcd43..b3ec52d20515f9 100644 --- a/crates/gpui/src/platform/linux/wayland/clipboard.rs +++ b/crates/gpui/src/platform/linux/wayland/clipboard.rs @@ -6,10 +6,14 @@ use std::{ use calloop::{LoopHandle, PostAction}; use filedescriptor::Pipe; +use strum::IntoEnumIterator; use wayland_client::{protocol::wl_data_offer::WlDataOffer, Connection}; use wayland_protocols::wp::primary_selection::zv1::client::zwp_primary_selection_offer_v1::ZwpPrimarySelectionOfferV1; -use crate::{platform::linux::platform::read_fd, ClipboardItem, WaylandClientStatePtr}; +use crate::{ + hash, platform::linux::platform::read_fd, ClipboardEntry, ClipboardItem, Image, ImageFormat, + WaylandClientStatePtr, +}; pub(crate) const TEXT_MIME_TYPE: &str = "text/plain;charset=utf-8"; pub(crate) const FILE_LIST_MIME_TYPE: &str = "text/uri-list"; @@ -33,14 +37,30 @@ pub(crate) struct Clipboard { current_primary_offer: Option>, } +pub(crate) trait ReceiveData { + fn receive_data(&self, mime_type: String, fd: BorrowedFd<'_>); +} + +impl ReceiveData for WlDataOffer { + fn receive_data(&self, mime_type: String, fd: BorrowedFd<'_>) { + self.receive(mime_type, fd); + } +} + +impl ReceiveData for ZwpPrimarySelectionOfferV1 { + fn receive_data(&self, mime_type: String, fd: BorrowedFd<'_>) { + self.receive(mime_type, fd); + } +} + #[derive(Clone, Debug)] /// Wrapper for `WlDataOffer` and `ZwpPrimarySelectionOfferV1`, used to help track mime types. 
-pub(crate) struct DataOffer { +pub(crate) struct DataOffer { pub inner: T, mime_types: Vec, } -impl DataOffer { +impl DataOffer { pub fn new(offer: T) -> Self { Self { inner: offer, @@ -52,17 +72,71 @@ impl DataOffer { self.mime_types.push(mime_type) } - pub fn has_mime_type(&self, mime_type: &str) -> bool { + fn has_mime_type(&self, mime_type: &str) -> bool { self.mime_types.iter().any(|t| t == mime_type) } - pub fn find_text_mime_type(&self) -> Option { - for offered_mime_type in &self.mime_types { - if let Some(offer_text_mime_type) = ALLOWED_TEXT_MIME_TYPES - .into_iter() - .find(|text_mime_type| text_mime_type == offered_mime_type) - { - return Some(offer_text_mime_type.to_owned()); + fn read_bytes(&self, connection: &Connection, mime_type: &str) -> Option> { + let pipe = Pipe::new().unwrap(); + self.inner.receive_data(mime_type.to_string(), unsafe { + BorrowedFd::borrow_raw(pipe.write.as_raw_fd()) + }); + let fd = pipe.read; + drop(pipe.write); + + connection.flush().unwrap(); + + match unsafe { read_fd(fd) } { + Ok(bytes) => Some(bytes), + Err(err) => { + log::error!("error reading clipboard pipe: {err:?}"); + None + } + } + } + + fn read_text(&self, connection: &Connection) -> Option { + let mime_type = self.mime_types.iter().find(|&mime_type| { + ALLOWED_TEXT_MIME_TYPES + .iter() + .any(|&allowed| allowed == mime_type) + })?; + let bytes = self.read_bytes(connection, mime_type)?; + let text_content = match String::from_utf8(bytes) { + Ok(content) => content, + Err(e) => { + log::error!("Failed to convert clipboard content to UTF-8: {}", e); + return None; + } + }; + + // Normalize the text to unix line endings, otherwise + // copying from eg: firefox inserts a lot of blank + // lines, and that is super annoying. 
+ let result = text_content.replace("\r\n", "\n"); + Some(ClipboardItem::new_string(result)) + } + + fn read_image(&self, connection: &Connection) -> Option { + for format in ImageFormat::iter() { + let mime_type = match format { + ImageFormat::Png => "image/png", + ImageFormat::Jpeg => "image/jpeg", + ImageFormat::Webp => "image/webp", + ImageFormat::Gif => "image/gif", + ImageFormat::Svg => "image/svg+xml", + ImageFormat::Bmp => "image/bmp", + ImageFormat::Tiff => "image/tiff", + }; + if !self.has_mime_type(mime_type) { + continue; + } + + if let Some(bytes) = self.read_bytes(connection, mime_type) { + let id = hash(&bytes); + return Some(ClipboardItem { + entries: vec![ClipboardEntry::Image(Image { format, bytes, id })], + }); } } None @@ -128,7 +202,7 @@ impl Clipboard { } pub fn read(&mut self) -> Option { - let offer = self.current_offer.clone()?; + let offer = self.current_offer.as_ref()?; if let Some(cached) = self.cached_read.clone() { return Some(cached); } @@ -137,30 +211,16 @@ impl Clipboard { return self.contents.clone(); } - let mime_type = offer.find_text_mime_type()?; - let pipe = Pipe::new().unwrap(); - offer.inner.receive(mime_type, unsafe { - BorrowedFd::borrow_raw(pipe.write.as_raw_fd()) - }); - let fd = pipe.read; - drop(pipe.write); - - self.connection.flush().unwrap(); + let item = offer + .read_text(&self.connection) + .or_else(|| offer.read_image(&self.connection))?; - match unsafe { read_fd(fd) } { - Ok(v) => { - self.cached_read = Some(ClipboardItem::new_string(v)); - self.cached_read.clone() - } - Err(err) => { - log::error!("error reading clipboard pipe: {err:?}"); - None - } - } + self.cached_read = Some(item.clone()); + Some(item) } pub fn read_primary(&mut self) -> Option { - let offer = self.current_primary_offer.clone()?; + let offer = self.current_primary_offer.as_ref()?; if let Some(cached) = self.cached_primary_read.clone() { return Some(cached); } @@ -169,26 +229,12 @@ impl Clipboard { return self.primary_contents.clone(); } - 
let mime_type = offer.find_text_mime_type()?; - let pipe = Pipe::new().unwrap(); - offer.inner.receive(mime_type, unsafe { - BorrowedFd::borrow_raw(pipe.write.as_raw_fd()) - }); - let fd = pipe.read; - drop(pipe.write); - - self.connection.flush().unwrap(); + let item = offer + .read_text(&self.connection) + .or_else(|| offer.read_image(&self.connection))?; - match unsafe { read_fd(fd) } { - Ok(v) => { - self.cached_primary_read = Some(ClipboardItem::new_string(v.clone())); - self.cached_primary_read.clone() - } - Err(err) => { - log::error!("error reading clipboard pipe: {err:?}"); - None - } - } + self.cached_primary_read = Some(item.clone()); + Some(item) } fn send_internal(&self, fd: OwnedFd, bytes: Vec) { From ecb7144b9571d8f99fb0cef19ae2ce554d2862ce Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Tue, 1 Oct 2024 07:26:20 +0800 Subject: [PATCH 411/762] windows: Fix can not set folder for `FileSaveDialog` (#17708) Closes #17622 Closes #17682 The story here is that `SHCreateItemFromParsingName` dose not accept UNC path. Video: https://github.com/user-attachments/assets/f4f7f671-5ab5-4965-9158-e7a79ac02654 Release Notes: - N/A --- crates/gpui/src/platform/windows/platform.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index d9f08c2247adf0..a900d0114bb234 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -664,10 +664,11 @@ fn file_save_dialog(directory: PathBuf) -> Result> { let dialog: IFileSaveDialog = unsafe { CoCreateInstance(&FileSaveDialog, None, CLSCTX_ALL)? 
}; if !directory.to_string_lossy().is_empty() { if let Some(full_path) = directory.canonicalize().log_err() { - let full_path = full_path.to_string_lossy().to_string(); - if !full_path.is_empty() { + let full_path = full_path.to_string_lossy(); + let full_path_str = full_path.trim_start_matches("\\\\?\\"); + if !full_path_str.is_empty() { let path_item: IShellItem = - unsafe { SHCreateItemFromParsingName(&HSTRING::from(&full_path), None)? }; + unsafe { SHCreateItemFromParsingName(&HSTRING::from(full_path_str), None)? }; unsafe { dialog.SetFolder(&path_item).log_err() }; } } From 77506afd8314a8b300971b4ac444955bad654652 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Tue, 1 Oct 2024 07:29:23 +0800 Subject: [PATCH 412/762] windows: Implement copy/paste images (#17852) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **Clipboard Behavior on Windows Under This PR:** | User Action | Zed’s Behavior | | ------------------- | -------------------------------------------------- | | Paste PNG | Worked | | Paste JPEG | Worked | | Paste WebP | Worked, but not in the way you expect (see Issue section below) | | Paste GIF | Partially worked (see Issue section below) | | Paste SVG | Partially worked (see Issue section below) | | Paste BMP | Worked, but not in the way you expect (see Issue section below) | | Paste TIFF | Worked, but not in the way you expect (see Issue section below) | | Paste Files | Worked, same behavior as macOS | | Copy image in Zed | Not tested, as I couldn’t find a way to copy images | --- **Differences Between the Windows and macOS Clipboard** The clipboard functionality on Windows differs significantly from macOS. On macOS, there can be multiple items in the clipboard, whereas, on Windows, the clipboard holds only a single item. You can retrieve different formats from the clipboard, but they are all just different representations of the same item. 
For example, when you copy a JPG image from Microsoft Word, the clipboard will contain data in several formats: - Microsoft Office proprietary data - JPG format data - PNG format data - SVG format data Please note that these formats all represent the same image, just in different formats. This is due to compatibility concerns on Windows, as various applications support different formats. Ideally, multiple formats should be placed on the clipboard to support more software. However, in general, supporting PNG will cover 99% of software, like Chrome, which only supports PNG and BMP formats. Additionally, since the clipboard on Windows only contains a single item, special handling is required when copying multiple objects, such as text and images. For instance, if you copy both text and an image simultaneously in Microsoft Word, Microsoft places the following data on the clipboard: - Microsoft Office proprietary data containing a lot of content such as text fonts, sizes, italics, positioning, image size, content, etc. - RTF data representing the above content in RTF format - HTML data representing the content in HTML format - Plain text data Therefore, for the current `ClipboardItem` implementation, if there are multiple `ClipboardEntry` objects to be placed on the clipboard, RTF or HTML formats are required. This PR does not support this scenario, and only supports copying or pasting a single item from the clipboard. --- **Known Issues** - **WebP, BMP, TIFF**: These formats are not explicitly supported in this PR. However, as mentioned earlier, in most cases, there are corresponding PNG format data on the clipboard. This PR retrieves data via PNG format, so users copying images in these formats from other sources will still see the images displayed correctly. - **GIF**: In this PR, GIFs are displayed, but for GIF images with multiple frames, the image will not animate and will freeze on a single frame. 
Since I observed the same behavior on macOS, I believe this is not an issue with this PR. - **SVG**: In this PR, only the top-left corner of the SVG image is displayed. Again, I observed the same behavior on macOS, so I believe this issue is not specific to this PR. --- I hope this provides a clearer understanding. Any feedback or suggestions on how to improve this are welcome. Release Notes: - N/A --- crates/gpui/src/platform/windows.rs | 2 + crates/gpui/src/platform/windows/clipboard.rs | 366 ++++++++++++++++++ crates/gpui/src/platform/windows/platform.rs | 140 +------ crates/gpui/src/platform/windows/window.rs | 23 +- crates/gpui/src/platform/windows/wrapper.rs | 34 +- 5 files changed, 408 insertions(+), 157 deletions(-) create mode 100644 crates/gpui/src/platform/windows/clipboard.rs diff --git a/crates/gpui/src/platform/windows.rs b/crates/gpui/src/platform/windows.rs index aa484eccdc29c1..84cf107c70516d 100644 --- a/crates/gpui/src/platform/windows.rs +++ b/crates/gpui/src/platform/windows.rs @@ -1,3 +1,4 @@ +mod clipboard; mod direct_write; mod dispatcher; mod display; @@ -8,6 +9,7 @@ mod util; mod window; mod wrapper; +pub(crate) use clipboard::*; pub(crate) use direct_write::*; pub(crate) use dispatcher::*; pub(crate) use display::*; diff --git a/crates/gpui/src/platform/windows/clipboard.rs b/crates/gpui/src/platform/windows/clipboard.rs new file mode 100644 index 00000000000000..7e607b6a8c7751 --- /dev/null +++ b/crates/gpui/src/platform/windows/clipboard.rs @@ -0,0 +1,366 @@ +use std::sync::LazyLock; + +use anyhow::Result; +use collections::{FxHashMap, FxHashSet}; +use itertools::Itertools; +use util::ResultExt; +use windows::Win32::{ + Foundation::HANDLE, + System::{ + DataExchange::{ + CloseClipboard, CountClipboardFormats, EmptyClipboard, EnumClipboardFormats, + GetClipboardData, GetClipboardFormatNameW, IsClipboardFormatAvailable, OpenClipboard, + RegisterClipboardFormatW, SetClipboardData, + }, + Memory::{GlobalAlloc, GlobalLock, GlobalUnlock, 
GMEM_MOVEABLE}, + Ole::{CF_HDROP, CF_UNICODETEXT}, + }, + UI::Shell::{DragQueryFileW, HDROP}, +}; +use windows_core::PCWSTR; + +use crate::{ + hash, ClipboardEntry, ClipboardItem, ClipboardString, Image, ImageFormat, SmartGlobal, +}; + +// https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-dragqueryfilew +const DRAGDROP_GET_FILES_COUNT: u32 = 0xFFFFFFFF; + +// Clipboard formats +static CLIPBOARD_HASH_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("GPUI internal text hash"))); +static CLIPBOARD_METADATA_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("GPUI internal metadata"))); +static CLIPBOARD_SVG_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("image/svg+xml"))); +static CLIPBOARD_GIF_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("GIF"))); +static CLIPBOARD_PNG_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("PNG"))); +static CLIPBOARD_JPG_FORMAT: LazyLock = + LazyLock::new(|| register_clipboard_format(windows::core::w!("JFIF"))); + +// Helper maps and sets +static FORMATS_MAP: LazyLock> = LazyLock::new(|| { + let mut formats_map = FxHashMap::default(); + formats_map.insert(CF_UNICODETEXT.0 as u32, ClipboardFormatType::Text); + formats_map.insert(*CLIPBOARD_PNG_FORMAT, ClipboardFormatType::Image); + formats_map.insert(*CLIPBOARD_GIF_FORMAT, ClipboardFormatType::Image); + formats_map.insert(*CLIPBOARD_JPG_FORMAT, ClipboardFormatType::Image); + formats_map.insert(*CLIPBOARD_SVG_FORMAT, ClipboardFormatType::Image); + formats_map.insert(CF_HDROP.0 as u32, ClipboardFormatType::Files); + formats_map +}); +static FORMATS_SET: LazyLock> = LazyLock::new(|| { + let mut formats_map = FxHashSet::default(); + formats_map.insert(CF_UNICODETEXT.0 as u32); + formats_map.insert(*CLIPBOARD_PNG_FORMAT); + formats_map.insert(*CLIPBOARD_GIF_FORMAT); + 
formats_map.insert(*CLIPBOARD_JPG_FORMAT); + formats_map.insert(*CLIPBOARD_SVG_FORMAT); + formats_map.insert(CF_HDROP.0 as u32); + formats_map +}); +static IMAGE_FORMATS_MAP: LazyLock> = LazyLock::new(|| { + let mut formats_map = FxHashMap::default(); + formats_map.insert(*CLIPBOARD_PNG_FORMAT, ImageFormat::Png); + formats_map.insert(*CLIPBOARD_GIF_FORMAT, ImageFormat::Gif); + formats_map.insert(*CLIPBOARD_JPG_FORMAT, ImageFormat::Jpeg); + formats_map.insert(*CLIPBOARD_SVG_FORMAT, ImageFormat::Svg); + formats_map +}); + +#[derive(Debug, Clone, Copy)] +enum ClipboardFormatType { + Text, + Image, + Files, +} + +pub(crate) fn write_to_clipboard(item: ClipboardItem) { + write_to_clipboard_inner(item).log_err(); + unsafe { CloseClipboard().log_err() }; +} + +pub(crate) fn read_from_clipboard() -> Option { + let result = read_from_clipboard_inner(); + unsafe { CloseClipboard().log_err() }; + result +} + +pub(crate) fn with_file_names(hdrop: HDROP, mut f: F) +where + F: FnMut(String), +{ + let file_count = unsafe { DragQueryFileW(hdrop, DRAGDROP_GET_FILES_COUNT, None) }; + for file_index in 0..file_count { + let filename_length = unsafe { DragQueryFileW(hdrop, file_index, None) } as usize; + let mut buffer = vec![0u16; filename_length + 1]; + let ret = unsafe { DragQueryFileW(hdrop, file_index, Some(buffer.as_mut_slice())) }; + if ret == 0 { + log::error!("unable to read file name"); + continue; + } + if let Some(file_name) = String::from_utf16(&buffer[0..filename_length]).log_err() { + f(file_name); + } + } +} + +fn register_clipboard_format(format: PCWSTR) -> u32 { + let ret = unsafe { RegisterClipboardFormatW(format) }; + if ret == 0 { + panic!( + "Error when registering clipboard format: {}", + std::io::Error::last_os_error() + ); + } + ret +} + +#[inline] +fn format_to_type(item_format: u32) -> &'static ClipboardFormatType { + FORMATS_MAP.get(&item_format).unwrap() +} + +// Currently, we only write the first item. 
+fn write_to_clipboard_inner(item: ClipboardItem) -> Result<()> { + unsafe { + OpenClipboard(None)?; + EmptyClipboard()?; + } + match item.entries().first() { + Some(entry) => match entry { + ClipboardEntry::String(string) => { + write_string_to_clipboard(string)?; + } + ClipboardEntry::Image(image) => { + write_image_to_clipboard(image)?; + } + }, + None => { + // Writing an empty list of entries just clears the clipboard. + } + } + Ok(()) +} + +fn write_string_to_clipboard(item: &ClipboardString) -> Result<()> { + let encode_wide = item.text.encode_utf16().chain(Some(0)).collect_vec(); + set_data_to_clipboard(&encode_wide, CF_UNICODETEXT.0 as u32)?; + + if let Some(metadata) = item.metadata.as_ref() { + let hash_result = { + let hash = ClipboardString::text_hash(&item.text); + hash.to_ne_bytes() + }; + let encode_wide = + unsafe { std::slice::from_raw_parts(hash_result.as_ptr().cast::(), 4) }; + set_data_to_clipboard(encode_wide, *CLIPBOARD_HASH_FORMAT)?; + + let metadata_wide = metadata.encode_utf16().chain(Some(0)).collect_vec(); + set_data_to_clipboard(&metadata_wide, *CLIPBOARD_METADATA_FORMAT)?; + } + Ok(()) +} + +fn set_data_to_clipboard(data: &[T], format: u32) -> Result<()> { + unsafe { + let global = GlobalAlloc(GMEM_MOVEABLE, std::mem::size_of_val(data))?; + let handle = GlobalLock(global); + std::ptr::copy_nonoverlapping(data.as_ptr(), handle as _, data.len()); + let _ = GlobalUnlock(global); + SetClipboardData(format, HANDLE(global.0))?; + } + Ok(()) +} + +// Here writing PNG to the clipboard to better support other apps. For more info, please ref to +// the PR. 
+fn write_image_to_clipboard(item: &Image) -> Result<()> { + match item.format { + ImageFormat::Svg => set_data_to_clipboard(item.bytes(), *CLIPBOARD_SVG_FORMAT)?, + ImageFormat::Gif => { + set_data_to_clipboard(item.bytes(), *CLIPBOARD_GIF_FORMAT)?; + let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Gif)?; + set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; + } + ImageFormat::Png => { + set_data_to_clipboard(item.bytes(), *CLIPBOARD_PNG_FORMAT)?; + let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Png)?; + set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; + } + ImageFormat::Jpeg => { + set_data_to_clipboard(item.bytes(), *CLIPBOARD_JPG_FORMAT)?; + let png_bytes = convert_image_to_png_format(item.bytes(), ImageFormat::Jpeg)?; + set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; + } + other => { + log::warn!( + "Clipboard unsupported image format: {:?}, convert to PNG instead.", + item.format + ); + let png_bytes = convert_image_to_png_format(item.bytes(), other)?; + set_data_to_clipboard(&png_bytes, *CLIPBOARD_PNG_FORMAT)?; + } + } + Ok(()) +} + +fn convert_image_to_png_format(bytes: &[u8], image_format: ImageFormat) -> Result> { + let image = image::load_from_memory_with_format(bytes, image_format.into())?; + let mut output_buf = Vec::new(); + image.write_to( + &mut std::io::Cursor::new(&mut output_buf), + image::ImageFormat::Png, + )?; + Ok(output_buf) +} + +fn read_from_clipboard_inner() -> Option { + unsafe { OpenClipboard(None) }.log_err()?; + with_best_match_format(|item_format| match format_to_type(item_format) { + ClipboardFormatType::Text => read_string_from_clipboard(), + ClipboardFormatType::Image => read_image_from_clipboard(item_format), + ClipboardFormatType::Files => read_files_from_clipboard(), + }) +} + +// Here, we enumerate all formats on the clipboard and find the first one that we can process. 
+// The reason we don't use `GetPriorityClipboardFormat` is that it sometimes returns the +// wrong format. +// For instance, when copying a JPEG image from Microsoft Word, there may be several formats +// on the clipboard: Jpeg, Png, Svg. +// If we use `GetPriorityClipboardFormat`, it will return Svg, which is not what we want. +fn with_best_match_format(f: F) -> Option +where + F: Fn(u32) -> Option, +{ + let count = unsafe { CountClipboardFormats() }; + let mut clipboard_format = 0; + for _ in 0..count { + clipboard_format = unsafe { EnumClipboardFormats(clipboard_format) }; + let Some(item_format) = FORMATS_SET.get(&clipboard_format) else { + continue; + }; + if let Some(entry) = f(*item_format) { + return Some(ClipboardItem { + entries: vec![entry], + }); + } + } + // log the formats that we don't support yet. + { + clipboard_format = 0; + for _ in 0..count { + clipboard_format = unsafe { EnumClipboardFormats(clipboard_format) }; + let mut buffer = [0u16; 64]; + unsafe { GetClipboardFormatNameW(clipboard_format, &mut buffer) }; + let format_name = String::from_utf16_lossy(&buffer); + log::warn!( + "Try to paste with unsupported clipboard format: {}, {}.", + clipboard_format, + format_name + ); + } + } + None +} + +fn read_string_from_clipboard() -> Option { + let text = { + let global = SmartGlobal::from_raw_ptr( + unsafe { GetClipboardData(CF_UNICODETEXT.0 as u32).log_err() }?.0, + ); + let text = PCWSTR(global.lock() as *const u16); + String::from_utf16_lossy(unsafe { text.as_wide() }) + }; + let Some(hash) = read_hash_from_clipboard() else { + return Some(ClipboardEntry::String(ClipboardString::new(text))); + }; + let Some(metadata) = read_metadata_from_clipboard() else { + return Some(ClipboardEntry::String(ClipboardString::new(text))); + }; + if hash == ClipboardString::text_hash(&text) { + Some(ClipboardEntry::String(ClipboardString { + text, + metadata: Some(metadata), + })) + } else { + Some(ClipboardEntry::String(ClipboardString::new(text))) + } +} + 
+fn read_hash_from_clipboard() -> Option { + if unsafe { IsClipboardFormatAvailable(*CLIPBOARD_HASH_FORMAT).is_err() } { + return None; + } + let global = + SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(*CLIPBOARD_HASH_FORMAT).log_err() }?.0); + let raw_ptr = global.lock() as *const u16; + let hash_bytes: [u8; 8] = unsafe { + std::slice::from_raw_parts(raw_ptr.cast::(), 8) + .to_vec() + .try_into() + .log_err() + }?; + Some(u64::from_ne_bytes(hash_bytes)) +} + +fn read_metadata_from_clipboard() -> Option { + unsafe { IsClipboardFormatAvailable(*CLIPBOARD_METADATA_FORMAT).log_err()? }; + let global = SmartGlobal::from_raw_ptr( + unsafe { GetClipboardData(*CLIPBOARD_METADATA_FORMAT).log_err() }?.0, + ); + let text = PCWSTR(global.lock() as *const u16); + Some(String::from_utf16_lossy(unsafe { text.as_wide() })) +} + +fn read_image_from_clipboard(format: u32) -> Option { + let image_format = format_number_to_image_format(format)?; + read_image_for_type(format, *image_format) +} + +#[inline] +fn format_number_to_image_format(format_number: u32) -> Option<&'static ImageFormat> { + IMAGE_FORMATS_MAP.get(&format_number) +} + +fn read_image_for_type(format_number: u32, format: ImageFormat) -> Option { + let global = SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(format_number).log_err() }?.0); + let image_ptr = global.lock(); + let iamge_size = global.size(); + let bytes = + unsafe { std::slice::from_raw_parts(image_ptr as *mut u8 as _, iamge_size).to_vec() }; + let id = hash(&bytes); + Some(ClipboardEntry::Image(Image { format, bytes, id })) +} + +fn read_files_from_clipboard() -> Option { + let global = + SmartGlobal::from_raw_ptr(unsafe { GetClipboardData(CF_HDROP.0 as u32).log_err() }?.0); + let hdrop = HDROP(global.lock()); + let mut filenames = String::new(); + with_file_names(hdrop, |file_name| { + filenames.push_str(&file_name); + }); + Some(ClipboardEntry::String(ClipboardString { + text: filenames, + metadata: None, + })) +} + +impl From for 
image::ImageFormat { + fn from(value: ImageFormat) -> Self { + match value { + ImageFormat::Png => image::ImageFormat::Png, + ImageFormat::Jpeg => image::ImageFormat::Jpeg, + ImageFormat::Webp => image::ImageFormat::WebP, + ImageFormat::Gif => image::ImageFormat::Gif, + // ImageFormat::Svg => todo!(), + ImageFormat::Bmp => image::ImageFormat::Bmp, + ImageFormat::Tiff => image::ImageFormat::Tiff, + _ => unreachable!(), + } + } +} diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index a900d0114bb234..30e7c402d26d85 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -17,24 +17,12 @@ use windows::{ core::*, Win32::{ Foundation::*, - Globalization::u_memcpy, Graphics::{ Gdi::*, Imaging::{CLSID_WICImagingFactory, IWICImagingFactory}, }, Security::Credentials::*, - System::{ - Com::*, - DataExchange::{ - CloseClipboard, EmptyClipboard, GetClipboardData, OpenClipboard, - RegisterClipboardFormatW, SetClipboardData, - }, - LibraryLoader::*, - Memory::{GlobalAlloc, GlobalLock, GlobalUnlock, GMEM_MOVEABLE}, - Ole::*, - SystemInformation::*, - Threading::*, - }, + System::{Com::*, LibraryLoader::*, Ole::*, SystemInformation::*, Threading::*}, UI::{Input::KeyboardAndMouse::*, Shell::*, WindowsAndMessaging::*}, }, UI::ViewManagement::UISettings, @@ -52,8 +40,6 @@ pub(crate) struct WindowsPlatform { background_executor: BackgroundExecutor, foreground_executor: ForegroundExecutor, text_system: Arc, - clipboard_hash_format: u32, - clipboard_metadata_format: u32, windows_version: WindowsVersion, bitmap_factory: ManuallyDrop, validation_number: usize, @@ -108,9 +94,6 @@ impl WindowsPlatform { let icon = load_icon().unwrap_or_default(); let state = RefCell::new(WindowsPlatformState::new()); let raw_window_handles = RwLock::new(SmallVec::new()); - let clipboard_hash_format = register_clipboard_format(CLIPBOARD_HASH_FORMAT).unwrap(); - let clipboard_metadata_format = - 
register_clipboard_format(CLIPBOARD_METADATA_FORMAT).unwrap(); let windows_version = WindowsVersion::new().expect("Error retrieve windows version"); let validation_number = rand::random::(); @@ -123,8 +106,6 @@ impl WindowsPlatform { background_executor, foreground_executor, text_system, - clipboard_hash_format, - clipboard_metadata_format, windows_version, bitmap_factory, validation_number, @@ -487,15 +468,11 @@ impl Platform for WindowsPlatform { } fn write_to_clipboard(&self, item: ClipboardItem) { - write_to_clipboard( - item, - self.clipboard_hash_format, - self.clipboard_metadata_format, - ); + write_to_clipboard(item); } fn read_from_clipboard(&self) -> Option { - read_from_clipboard(self.clipboard_hash_format, self.clipboard_metadata_format) + read_from_clipboard() } fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Task> { @@ -725,117 +702,6 @@ fn should_auto_hide_scrollbars() -> Result { Ok(ui_settings.AutoHideScrollBars()?) } -fn register_clipboard_format(format: PCWSTR) -> Result { - let ret = unsafe { RegisterClipboardFormatW(format) }; - if ret == 0 { - Err(anyhow::anyhow!( - "Error when registering clipboard format: {}", - std::io::Error::last_os_error() - )) - } else { - Ok(ret) - } -} - -fn write_to_clipboard(item: ClipboardItem, hash_format: u32, metadata_format: u32) { - write_to_clipboard_inner(item, hash_format, metadata_format).log_err(); - unsafe { CloseClipboard().log_err() }; -} - -fn write_to_clipboard_inner( - item: ClipboardItem, - hash_format: u32, - metadata_format: u32, -) -> Result<()> { - unsafe { - OpenClipboard(None)?; - EmptyClipboard()?; - let encode_wide = item - .text() - .unwrap_or_default() - .encode_utf16() - .chain(Some(0)) - .collect_vec(); - set_data_to_clipboard(&encode_wide, CF_UNICODETEXT.0 as u32)?; - - if let Some((metadata, text)) = item.metadata().zip(item.text()) { - let hash_result = { - let hash = ClipboardString::text_hash(&text); - hash.to_ne_bytes() - }; - let encode_wide = 
std::slice::from_raw_parts(hash_result.as_ptr().cast::(), 4); - set_data_to_clipboard(encode_wide, hash_format)?; - - let metadata_wide = metadata.encode_utf16().chain(Some(0)).collect_vec(); - set_data_to_clipboard(&metadata_wide, metadata_format)?; - } - } - Ok(()) -} - -fn set_data_to_clipboard(data: &[u16], format: u32) -> Result<()> { - unsafe { - let global = GlobalAlloc(GMEM_MOVEABLE, data.len() * 2)?; - let handle = GlobalLock(global); - u_memcpy(handle as _, data.as_ptr(), data.len() as _); - let _ = GlobalUnlock(global); - SetClipboardData(format, HANDLE(global.0))?; - } - Ok(()) -} - -fn read_from_clipboard(hash_format: u32, metadata_format: u32) -> Option { - let result = read_from_clipboard_inner(hash_format, metadata_format).log_err(); - unsafe { CloseClipboard().log_err() }; - result -} - -fn read_from_clipboard_inner(hash_format: u32, metadata_format: u32) -> Result { - unsafe { - OpenClipboard(None)?; - let text = { - let handle = GetClipboardData(CF_UNICODETEXT.0 as u32)?; - let text = PCWSTR(handle.0 as *const u16); - String::from_utf16_lossy(text.as_wide()) - }; - let Some(hash) = read_hash_from_clipboard(hash_format) else { - return Ok(ClipboardItem::new_string(text)); - }; - let Some(metadata) = read_metadata_from_clipboard(metadata_format) else { - return Ok(ClipboardItem::new_string(text)); - }; - if hash == ClipboardString::text_hash(&text) { - Ok(ClipboardItem::new_string_with_metadata(text, metadata)) - } else { - Ok(ClipboardItem::new_string(text)) - } - } -} - -fn read_hash_from_clipboard(hash_format: u32) -> Option { - unsafe { - let handle = GetClipboardData(hash_format).log_err()?; - let raw_ptr = handle.0 as *const u16; - let hash_bytes: [u8; 8] = std::slice::from_raw_parts(raw_ptr.cast::(), 8) - .to_vec() - .try_into() - .log_err()?; - Some(u64::from_ne_bytes(hash_bytes)) - } -} - -fn read_metadata_from_clipboard(metadata_format: u32) -> Option { - unsafe { - let handle = GetClipboardData(metadata_format).log_err()?; - let text = 
PCWSTR(handle.0 as *const u16); - Some(String::from_utf16_lossy(text.as_wide())) - } -} - -// clipboard -pub const CLIPBOARD_HASH_FORMAT: PCWSTR = windows::core::w!("zed-text-hash"); -pub const CLIPBOARD_METADATA_FORMAT: PCWSTR = windows::core::w!("zed-metadata"); - #[cfg(test)] mod tests { use crate::{ClipboardItem, Platform, WindowsPlatform}; diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index d5ea3be6cac5e6..b212a03a981481 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -735,23 +735,11 @@ impl IDropTarget_Impl for WindowsDragDropHandler_Impl { } let hdrop = idata.u.hGlobal.0 as *mut HDROP; let mut paths = SmallVec::<[PathBuf; 2]>::new(); - let file_count = DragQueryFileW(*hdrop, DRAGDROP_GET_FILES_COUNT, None); - for file_index in 0..file_count { - let filename_length = DragQueryFileW(*hdrop, file_index, None) as usize; - let mut buffer = vec![0u16; filename_length + 1]; - let ret = DragQueryFileW(*hdrop, file_index, Some(buffer.as_mut_slice())); - if ret == 0 { - log::error!("unable to read file name"); - continue; + with_file_names(*hdrop, |file_name| { + if let Some(path) = PathBuf::from_str(&file_name).log_err() { + paths.push(path); } - if let Some(file_name) = - String::from_utf16(&buffer[0..filename_length]).log_err() - { - if let Some(path) = PathBuf::from_str(&file_name).log_err() { - paths.push(path); - } - } - } + }); ReleaseStgMedium(&mut idata); let mut cursor_position = POINT { x: pt.x, y: pt.y }; ScreenToClient(self.0.hwnd, &mut cursor_position) @@ -1069,9 +1057,6 @@ fn calculate_client_rect( } } -// https://learn.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-dragqueryfilew -const DRAGDROP_GET_FILES_COUNT: u32 = 0xFFFFFFFF; - mod windows_renderer { use std::{num::NonZeroIsize, sync::Arc}; diff --git a/crates/gpui/src/platform/windows/wrapper.rs b/crates/gpui/src/platform/windows/wrapper.rs index 
6015dffdab2997..e6e645e61aebdf 100644 --- a/crates/gpui/src/platform/windows/wrapper.rs +++ b/crates/gpui/src/platform/windows/wrapper.rs @@ -1,6 +1,11 @@ use std::ops::Deref; -use windows::Win32::{Foundation::HANDLE, UI::WindowsAndMessaging::HCURSOR}; +use util::ResultExt; +use windows::Win32::{ + Foundation::{HANDLE, HGLOBAL}, + System::Memory::{GlobalLock, GlobalSize, GlobalUnlock}, + UI::WindowsAndMessaging::HCURSOR, +}; #[derive(Debug, Clone, Copy)] pub(crate) struct SafeHandle { @@ -45,3 +50,30 @@ impl Deref for SafeCursor { &self.raw } } + +#[derive(Debug, Clone)] +pub(crate) struct SmartGlobal { + raw: HGLOBAL, +} + +impl SmartGlobal { + pub(crate) fn from_raw_ptr(ptr: *mut std::ffi::c_void) -> Self { + Self { raw: HGLOBAL(ptr) } + } + + pub(crate) fn lock(&self) -> *mut std::ffi::c_void { + unsafe { GlobalLock(self.raw) } + } + + pub(crate) fn size(&self) -> usize { + unsafe { GlobalSize(self.raw) } + } +} + +impl Drop for SmartGlobal { + fn drop(&mut self) { + unsafe { + GlobalUnlock(self.raw).log_err(); + } + } +} From 938a0679c004be4ae2b6c1e897de75e4b4e7aa24 Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Tue, 1 Oct 2024 07:39:19 +0800 Subject: [PATCH 413/762] gpui: Fix img element to auto size when only have width or height (#17994) Release Notes: - N/A --- We may only want to set the height of an image to limit the size and make the width adaptive. In HTML, we will only set width or height, and the other side will adapt and maintain the original image ratio. I changed this because I had a logo image that only to be limited in height, and then I found that setting the height of the `img` alone would not display correctly. I also tried to set `ObjectFit` in this Demo, but it seems that none of them can achieve the same effect as "After". 
## Before before 2024-09-18 164029 ## After after 2024-09-18 172003 --- crates/gpui/examples/image/image.rs | 54 +++++++++++++++++++++-------- crates/gpui/src/elements/img.rs | 36 +++++++++++++------ 2 files changed, 65 insertions(+), 25 deletions(-) diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index 157dbdf70f1afc..24a94bf746ec47 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -69,25 +69,51 @@ struct ImageShowcase { impl Render for ImageShowcase { fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { div() - .flex() - .flex_row() .size_full() + .flex() + .flex_col() .justify_center() .items_center() .gap_8() .bg(rgb(0xFFFFFF)) - .child(ImageContainer::new( - "Image loaded from a local file", - self.local_resource.clone(), - )) - .child(ImageContainer::new( - "Image loaded from a remote resource", - self.remote_resource.clone(), - )) - .child(ImageContainer::new( - "Image loaded from an asset", - self.asset_resource.clone(), - )) + .child( + div() + .flex() + .flex_row() + .justify_center() + .items_center() + .gap_8() + .child(ImageContainer::new( + "Image loaded from a local file", + self.local_resource.clone(), + )) + .child(ImageContainer::new( + "Image loaded from a remote resource", + self.remote_resource.clone(), + )) + .child(ImageContainer::new( + "Image loaded from an asset", + self.asset_resource.clone(), + )), + ) + .child( + div() + .flex() + .flex_row() + .gap_8() + .child( + div() + .flex_col() + .child("Auto Width") + .child(img("https://picsum.photos/800/400").h(px(180.))), + ) + .child( + div() + .flex_col() + .child("Auto Height") + .child(img("https://picsum.photos/480/640").w(px(180.))), + ), + ) } } diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 63236d5309f145..58ee639265ec1a 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -1,7 +1,7 @@ use crate::{ px, AbsoluteLength, 
AppContext, Asset, Bounds, DefiniteLength, Element, ElementId, GlobalElementId, Hitbox, Image, InteractiveElement, Interactivity, IntoElement, LayoutId, - Length, ObjectFit, Pixels, RenderImage, SharedString, SharedUri, Size, StyleRefinement, Styled, + Length, ObjectFit, Pixels, RenderImage, SharedString, SharedUri, StyleRefinement, Styled, SvgSize, UriOrPath, WindowContext, }; use futures::{AsyncReadExt, Future}; @@ -187,16 +187,30 @@ impl Element for Img { let image_size = data.size(frame_index); - if let (Length::Auto, Length::Auto) = (style.size.width, style.size.height) - { - style.size = Size { - width: Length::Definite(DefiniteLength::Absolute( - AbsoluteLength::Pixels(px(image_size.width.0 as f32)), - )), - height: Length::Definite(DefiniteLength::Absolute( - AbsoluteLength::Pixels(px(image_size.height.0 as f32)), - )), - } + if let Length::Auto = style.size.width { + style.size.width = match style.size.height { + Length::Definite(DefiniteLength::Absolute( + AbsoluteLength::Pixels(height), + )) => Length::Definite( + px(image_size.width.0 as f32 * height.0 + / image_size.height.0 as f32) + .into(), + ), + _ => Length::Definite(px(image_size.width.0 as f32).into()), + }; + } + + if let Length::Auto = style.size.height { + style.size.height = match style.size.width { + Length::Definite(DefiniteLength::Absolute( + AbsoluteLength::Pixels(width), + )) => Length::Definite( + px(image_size.height.0 as f32 * width.0 + / image_size.width.0 as f32) + .into(), + ), + _ => Length::Definite(px(image_size.height.0 as f32).into()), + }; } if global_id.is_some() && data.frame_count() > 1 { From a752bbcee83f9cf853082e91c693f7bde197eb0f Mon Sep 17 00:00:00 2001 From: Patrick MARIE Date: Tue, 1 Oct 2024 01:51:05 +0200 Subject: [PATCH 414/762] Fix linux double click (#18504) Closes #17573 Release Notes: - Check that double clicks on Linux are triggered by same button. 
--- crates/gpui/src/platform/linux/wayland/client.rs | 7 +++++++ crates/gpui/src/platform/linux/x11/client.rs | 10 ++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index f7ade828615f4f..4b7816a73ac36c 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -236,6 +236,7 @@ pub struct DragState { } pub struct ClickState { + last_mouse_button: Option, last_click: Instant, last_location: Point, current_count: usize, @@ -535,6 +536,7 @@ impl WaylandClient { }, click: ClickState { last_click: Instant::now(), + last_mouse_button: None, last_location: Point::default(), current_count: 0, }, @@ -1524,6 +1526,10 @@ impl Dispatch for WaylandClientStatePtr { let click_elapsed = state.click.last_click.elapsed(); if click_elapsed < DOUBLE_CLICK_INTERVAL + && state + .click + .last_mouse_button + .is_some_and(|prev_button| prev_button == button) && is_within_click_distance( state.click.last_location, state.mouse_location.unwrap(), @@ -1535,6 +1541,7 @@ impl Dispatch for WaylandClientStatePtr { } state.click.last_click = Instant::now(); + state.click.last_mouse_button = Some(button); state.click.last_location = state.mouse_location.unwrap(); state.button_pressed = Some(button); diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index 7f5342a50360eb..5339cc95fd1b11 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -37,8 +37,8 @@ use crate::platform::linux::LinuxClient; use crate::platform::{LinuxCommon, PlatformWindow}; use crate::{ modifiers_from_xinput_info, point, px, AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, - DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, Pixels, Platform, - PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, 
Size, TouchPhase, + DisplayId, FileDropEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, Pixels, + Platform, PlatformDisplay, PlatformInput, Point, ScaledPixels, ScrollDelta, Size, TouchPhase, WindowParams, X11Window, }; @@ -122,6 +122,7 @@ pub struct X11ClientState { pub(crate) event_loop: Option>, pub(crate) last_click: Instant, + pub(crate) last_mouse_button: Option, pub(crate) last_location: Point, pub(crate) current_count: usize, @@ -404,6 +405,7 @@ impl X11Client { loop_handle: handle, common, last_click: Instant::now(), + last_mouse_button: None, last_location: Point::new(px(0.0), px(0.0)), current_count: 0, scale_factor, @@ -952,6 +954,9 @@ impl X11Client { let click_elapsed = state.last_click.elapsed(); if click_elapsed < DOUBLE_CLICK_INTERVAL + && state + .last_mouse_button + .is_some_and(|prev_button| prev_button == button) && is_within_click_distance(state.last_location, position) { state.current_count += 1; @@ -960,6 +965,7 @@ impl X11Client { } state.last_click = Instant::now(); + state.last_mouse_button = Some(button); state.last_location = position; let current_count = state.current_count; From 1d2172aba8c7718d36d363e8d4e7f8b1a3ad1cec Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 30 Sep 2024 21:07:10 -0400 Subject: [PATCH 415/762] docs: Correct glibc requirements (#18554) --- docs/src/linux.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/linux.md b/docs/src/linux.md index 3bba9c8f93aaae..33d12d0a8ca02f 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -16,7 +16,7 @@ The Zed installed by the script works best on systems that: - have a Vulkan compatible GPU available (for example Linux on an M-series macBook) - have a system-wide glibc (NixOS and Alpine do not by default) - - x86_64 (Intel/AMD): glibc version >= 2.29 (Ubuntu 20 and newer) + - x86_64 (Intel/AMD): glibc version >= 2.31 (Ubuntu 20 and newer) - aarch64 (ARM): glibc version >= 2.35 (Ubuntu 22 and newer) Both Nix and Alpine have 
third-party Zed packages available (though they are currently a few weeks out of date). If you'd like to use our builds they do work if you install a glibc compatibility layer. On NixOS you can try [nix-ld](https://github.com/Mic92/nix-ld), and on Alpine [gcompat](https://wiki.alpinelinux.org/wiki/Running_glibc_programs). From 39be9e5949483e8964322d9878e7d5cb794872cb Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Tue, 1 Oct 2024 09:25:02 +0800 Subject: [PATCH 416/762] gpui: Fix `show: false` support on Windows to create an invisible window (#18161) Release Notes: - N/A - The `show` of WindowOptions is valid on macOS but not on Windows, this changes to fix it to support create an invisible window. ```bash cargo run -p gpui --example window ``` ## Before https://github.com/user-attachments/assets/4157bdaa-39a7-44df-bbdc-30b00e9c61e9 ## After https://github.com/user-attachments/assets/d48fa524-0caa-4f87-932d-01d7a468c488 https://github.com/user-attachments/assets/dd052f15-c8db-4a2a-a6af-a7c0ffecca84 --- crates/gpui/examples/window.rs | 168 +++++++++++++++++++++ crates/gpui/src/platform/mac/window.rs | 2 +- crates/gpui/src/platform/windows/window.rs | 13 +- 3 files changed, 180 insertions(+), 3 deletions(-) create mode 100644 crates/gpui/examples/window.rs diff --git a/crates/gpui/examples/window.rs b/crates/gpui/examples/window.rs new file mode 100644 index 00000000000000..0f0d4287da723f --- /dev/null +++ b/crates/gpui/examples/window.rs @@ -0,0 +1,168 @@ +use gpui::*; +use prelude::FluentBuilder as _; + +struct SubWindow { + custom_titlebar: bool, +} + +fn button(text: &str, on_click: impl Fn(&mut WindowContext) + 'static) -> impl IntoElement { + div() + .id(SharedString::from(text.to_string())) + .flex_none() + .px_2() + .bg(rgb(0xf7f7f7)) + .active(|this| this.opacity(0.85)) + .border_1() + .border_color(rgb(0xe0e0e0)) + .rounded_md() + .cursor_pointer() + .child(text.to_string()) + .on_click(move |_, cx| on_click(cx)) +} + +impl Render for SubWindow { + fn 
render(&mut self, _: &mut ViewContext) -> impl IntoElement { + div() + .flex() + .flex_col() + .bg(rgb(0xffffff)) + .size_full() + .gap_2() + .when(self.custom_titlebar, |cx| { + cx.child( + div() + .flex() + .h(px(32.)) + .px_4() + .bg(gpui::blue()) + .text_color(gpui::white()) + .w_full() + .child( + div() + .flex() + .items_center() + .justify_center() + .size_full() + .child("Custom Titlebar"), + ), + ) + }) + .child( + div() + .p_8() + .gap_2() + .child("SubWindow") + .child(button("Close", |cx| { + cx.remove_window(); + })), + ) + } +} + +struct WindowDemo {} + +impl Render for WindowDemo { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let window_bounds = + WindowBounds::Windowed(Bounds::centered(None, size(px(300.0), px(300.0)), cx)); + + div() + .p_4() + .flex() + .flex_wrap() + .bg(rgb(0xffffff)) + .size_full() + .justify_center() + .items_center() + .gap_2() + .child(button("Normal", move |cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(window_bounds), + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: false, + }) + }, + ) + .unwrap(); + })) + .child(button("Popup", move |cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(window_bounds), + kind: WindowKind::PopUp, + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: false, + }) + }, + ) + .unwrap(); + })) + .child(button("Custom Titlebar", move |cx| { + cx.open_window( + WindowOptions { + titlebar: None, + window_bounds: Some(window_bounds), + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: true, + }) + }, + ) + .unwrap(); + })) + .child(button("Invisible", move |cx| { + cx.open_window( + WindowOptions { + show: false, + window_bounds: Some(window_bounds), + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: false, + }) + }, + ) + .unwrap(); + })) + .child(button("Unmovable", move |cx| { + cx.open_window( + 
WindowOptions { + is_movable: false, + titlebar: None, + window_bounds: Some(window_bounds), + ..Default::default() + }, + |cx| { + cx.new_view(|_cx| SubWindow { + custom_titlebar: false, + }) + }, + ) + .unwrap(); + })) + } +} + +fn main() { + App::new().run(|cx: &mut AppContext| { + let bounds = Bounds::centered(None, size(px(800.0), px(600.0)), cx); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |cx| cx.new_view(|_cx| WindowDemo {}), + ) + .unwrap(); + }); +} diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index 885c3565ccc899..5f9ee43dec48f7 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -707,7 +707,7 @@ impl MacWindow { } } - if focus { + if focus && show { native_window.makeKeyAndOrderFront_(nil); } else if show { native_window.orderFront_(nil); diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index b212a03a981481..d7b9a469b7d4e5 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -287,7 +287,7 @@ impl WindowsWindow { .map(|title| title.as_ref()) .unwrap_or(""), ); - let (dwexstyle, dwstyle) = if params.kind == WindowKind::PopUp { + let (dwexstyle, mut dwstyle) = if params.kind == WindowKind::PopUp { (WS_EX_TOOLWINDOW, WINDOW_STYLE(0x0)) } else { ( @@ -295,6 +295,10 @@ impl WindowsWindow { WS_THICKFRAME | WS_SYSMENU | WS_MAXIMIZEBOX | WS_MINIMIZEBOX, ) }; + if !params.show { + dwstyle |= WS_MINIMIZE; + } + let hinstance = get_module_handle(); let display = if let Some(display_id) = params.display_id { // if we obtain a display_id, then this ID must be valid. @@ -357,7 +361,12 @@ impl WindowsWindow { drop(lock); SetWindowPlacement(raw_hwnd, &placement)?; } - unsafe { ShowWindow(raw_hwnd, SW_SHOW).ok()? }; + + if params.show { + unsafe { ShowWindow(raw_hwnd, SW_SHOW).ok()? 
}; + } else { + unsafe { ShowWindow(raw_hwnd, SW_HIDE).ok()? }; + } Ok(Self(state_ptr)) } From 8d795ff882ec6ee6eb40346ee4fbcba88e6e6b6d Mon Sep 17 00:00:00 2001 From: Alvaro Parker <64918109+AlvaroParker@users.noreply.github.com> Date: Tue, 1 Oct 2024 03:04:35 -0300 Subject: [PATCH 417/762] Fix file watching for symlinks (#17609) Closes #17605 Watches for target paths if file watched is a symlink in Linux. This will check if the generated `notify::Event` has any paths matching the `root_path` and if the file is a symlink it will also check if the path matches the `target_root_path` (the path that the symlink is pointing to) Release Notes: - Added file watching for symlinks --- crates/fs/src/fs.rs | 84 +++++++++++++++++++++++++++++---------------- 1 file changed, 54 insertions(+), 30 deletions(-) diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index b649831fd2de69..7064448e168298 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -587,38 +587,54 @@ impl Fs for RealFs { let pending_paths: Arc>> = Default::default(); let root_path = path.to_path_buf(); - watcher::global(|g| { - let tx = tx.clone(); - let pending_paths = pending_paths.clone(); - g.add(move |event: ¬ify::Event| { - let kind = match event.kind { - EventKind::Create(_) => Some(PathEventKind::Created), - EventKind::Modify(_) => Some(PathEventKind::Changed), - EventKind::Remove(_) => Some(PathEventKind::Removed), - _ => None, - }; - let mut paths = event - .paths - .iter() - .filter_map(|path| { - path.starts_with(&root_path).then(|| PathEvent { - path: path.clone(), - kind, + // Check if root path is a symlink + let target_path = self.read_link(&path).await.ok(); + + watcher::global({ + let target_path = target_path.clone(); + |g| { + let tx = tx.clone(); + let pending_paths = pending_paths.clone(); + g.add(move |event: ¬ify::Event| { + let kind = match event.kind { + EventKind::Create(_) => Some(PathEventKind::Created), + EventKind::Modify(_) => Some(PathEventKind::Changed), + 
EventKind::Remove(_) => Some(PathEventKind::Removed), + _ => None, + }; + let mut paths = event + .paths + .iter() + .filter_map(|path| { + if let Some(target) = target_path.clone() { + if path.starts_with(target) { + return Some(PathEvent { + path: path.clone(), + kind, + }); + } + } else if path.starts_with(&root_path) { + return Some(PathEvent { + path: path.clone(), + kind, + }); + } + None }) - }) - .collect::>(); - - if !paths.is_empty() { - paths.sort(); - let mut pending_paths = pending_paths.lock(); - if pending_paths.is_empty() { - tx.try_send(()).ok(); + .collect::>(); + + if !paths.is_empty() { + paths.sort(); + let mut pending_paths = pending_paths.lock(); + if pending_paths.is_empty() { + tx.try_send(()).ok(); + } + util::extend_sorted(&mut *pending_paths, paths, usize::MAX, |a, b| { + a.path.cmp(&b.path) + }); } - util::extend_sorted(&mut *pending_paths, paths, usize::MAX, |a, b| { - a.path.cmp(&b.path) - }); - } - }) + }) + } }) .log_err(); @@ -626,6 +642,14 @@ impl Fs for RealFs { watcher.add(path).ok(); // Ignore "file doesn't exist error" and rely on parent watcher. + // Check if path is a symlink and follow the target parent + if let Some(target) = target_path { + watcher.add(&target).ok(); + if let Some(parent) = target.parent() { + watcher.add(parent).log_err(); + } + } + // watch the parent dir so we can tell when settings.json is created if let Some(parent) = path.parent() { watcher.add(parent).log_err(); From 72be8c5d145d0f7554fd7cb8d3ee3c6c4e99423d Mon Sep 17 00:00:00 2001 From: Jason Lee Date: Tue, 1 Oct 2024 14:20:24 +0800 Subject: [PATCH 418/762] gpui: Fix `hide`, `activate` method on Windows to hide/show application (#18164) Release Notes: - N/A Continue #18161 to fix `cx.hide`, `cx.activate` method on Windows to hide/show application. 
## After https://github.com/user-attachments/assets/fe0070f9-7844-4c2a-b859-3e22ee4b8d22 --------- Co-authored-by: Mikayla Maki --- crates/gpui/examples/window.rs | 12 ++++++++++ crates/gpui/src/platform/windows/platform.rs | 24 ++++++++++++++++---- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/crates/gpui/examples/window.rs b/crates/gpui/examples/window.rs index 0f0d4287da723f..78a47782c956a8 100644 --- a/crates/gpui/examples/window.rs +++ b/crates/gpui/examples/window.rs @@ -150,6 +150,18 @@ impl Render for WindowDemo { ) .unwrap(); })) + .child(button("Hide Application", |cx| { + cx.hide(); + + // Restore the application after 3 seconds + cx.spawn(|mut cx| async move { + Timer::after(std::time::Duration::from_secs(3)).await; + cx.update(|cx| { + cx.activate(false); + }) + }) + .detach(); + })) } } diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 30e7c402d26d85..7f6677973b2fbd 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -33,6 +33,8 @@ use crate::*; pub(crate) struct WindowsPlatform { state: RefCell, raw_window_handles: RwLock>, + // The window handles that are hided by `hide` method. + hidden_windows: RwLock>, // The below members will never change throughout the entire lifecycle of the app. 
icon: HICON, main_receiver: flume::Receiver, @@ -100,6 +102,7 @@ impl WindowsPlatform { Self { state, raw_window_handles, + hidden_windows: RwLock::new(SmallVec::new()), icon, main_receiver, dispatch_event, @@ -295,12 +298,25 @@ impl Platform for WindowsPlatform { } } - // todo(windows) - fn activate(&self, _ignoring_other_apps: bool) {} + fn activate(&self, _ignoring_other_apps: bool) { + let mut state = self.hidden_windows.write(); + state.iter().for_each(|handle| unsafe { + ShowWindow(*handle, SW_SHOW).ok().log_err(); + }); + state.clear(); + } - // todo(windows) fn hide(&self) { - unimplemented!() + let mut state = self.hidden_windows.write(); + self.raw_window_handles + .read() + .iter() + .for_each(|handle| unsafe { + if IsWindowVisible(*handle).as_bool() { + state.push(*handle); + ShowWindow(*handle, SW_HIDE).ok().log_err(); + } + }); } // todo(windows) From 527c9097f848feef78cfb6cfd36d32eb048e6bb5 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Tue, 1 Oct 2024 01:14:40 -0600 Subject: [PATCH 419/762] linux: Various X11 scroll improvements (#18484) Closes #14089, #14416, #15970, #17230, #18485 Release Notes: - Fixed some cases where Linux X11 mouse scrolling doesn't work at all (#14089, ##15970, #17230) - Fixed handling of switching between Linux X11 devices used for scrolling (#14416, #18485) Change details: Also includes the commit from PR #18317 so I don't have to deal with merge conflicts. * Now uses valuator info from slave pointers rather than master. This hopefully fixes remaining cases where scrolling is fully broken. https://github.com/zed-industries/zed/issues/14089, https://github.com/zed-industries/zed/issues/15970, https://github.com/zed-industries/zed/issues/17230 * Per-device recording of "last scroll position" used to calculate deltas. This meant that swithing scroll devices would cause a sudden jump of scroll position, often to the beginning or end of the file (https://github.com/zed-industries/zed/issues/14416). 
* Re-queries device metadata when devices change, so that newly plugged in devices will work, and re-use of device-ids don't use old metadata with a new device. * xinput 2 documentation describes support for multiple master devices. I believe this implementation will support that, since now it just uses `DeviceInfo` from slave devices. The concept of master devices is only used in registering for events. * Uses popcount+bit masking to resolve axis indexes, instead of iterating bit indices. --------- Co-authored-by: Thorsten Ball --- crates/gpui/src/platform/linux/platform.rs | 2 +- .../gpui/src/platform/linux/wayland/client.rs | 8 +- crates/gpui/src/platform/linux/x11/client.rs | 406 ++++++++++++------ crates/gpui/src/platform/linux/x11/event.rs | 116 ++++- crates/gpui/src/platform/linux/x11/window.rs | 19 +- 5 files changed, 408 insertions(+), 143 deletions(-) diff --git a/crates/gpui/src/platform/linux/platform.rs b/crates/gpui/src/platform/linux/platform.rs index 67f1a43cbe3220..6e09badb493a64 100644 --- a/crates/gpui/src/platform/linux/platform.rs +++ b/crates/gpui/src/platform/linux/platform.rs @@ -45,7 +45,7 @@ use crate::{ use super::x11::X11Client; -pub(crate) const SCROLL_LINES: f64 = 3.0; +pub(crate) const SCROLL_LINES: f32 = 3.0; // Values match the defaults on GTK. 
// Taken from https://github.com/GNOME/gtk/blob/main/gtk/gtksettings.c#L320 diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 4b7816a73ac36c..f0015a7e5820bd 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -1634,10 +1634,10 @@ impl Dispatch for WaylandClientStatePtr { let scroll_delta = state.discrete_scroll_delta.get_or_insert(point(0.0, 0.0)); match axis { wl_pointer::Axis::VerticalScroll => { - scroll_delta.y += discrete as f32 * axis_modifier * SCROLL_LINES as f32; + scroll_delta.y += discrete as f32 * axis_modifier * SCROLL_LINES; } wl_pointer::Axis::HorizontalScroll => { - scroll_delta.x += discrete as f32 * axis_modifier * SCROLL_LINES as f32; + scroll_delta.x += discrete as f32 * axis_modifier * SCROLL_LINES; } _ => unreachable!(), } @@ -1662,10 +1662,10 @@ impl Dispatch for WaylandClientStatePtr { let wheel_percent = value120 as f32 / 120.0; match axis { wl_pointer::Axis::VerticalScroll => { - scroll_delta.y += wheel_percent * axis_modifier * SCROLL_LINES as f32; + scroll_delta.y += wheel_percent * axis_modifier * SCROLL_LINES; } wl_pointer::Axis::HorizontalScroll => { - scroll_delta.x += wheel_percent * axis_modifier * SCROLL_LINES as f32; + scroll_delta.x += wheel_percent * axis_modifier * SCROLL_LINES; } _ => unreachable!(), } diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index 5339cc95fd1b11..459f2045bb732a 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -1,6 +1,6 @@ use core::str; use std::cell::RefCell; -use std::collections::HashSet; +use std::collections::{BTreeMap, HashSet}; use std::ops::Deref; use std::path::PathBuf; use std::rc::{Rc, Weak}; @@ -42,7 +42,10 @@ use crate::{ WindowParams, X11Window, }; -use super::{button_of_key, modifiers_from_state, pressed_button_from_mask}; +use 
super::{ + button_or_scroll_from_event_detail, get_valuator_axis_index, modifiers_from_state, + pressed_button_from_mask, ButtonOrScroll, ScrollDirection, +}; use super::{X11Display, X11WindowStatePtr, XcbAtoms}; use super::{XimCallbackEvent, XimHandler}; use crate::platform::linux::platform::{DOUBLE_CLICK_INTERVAL, SCROLL_LINES}; @@ -51,7 +54,15 @@ use crate::platform::linux::{ get_xkb_compose_state, is_within_click_distance, open_uri_internal, reveal_path_internal, }; -pub(super) const XINPUT_MASTER_DEVICE: u16 = 1; +/// Value for DeviceId parameters which selects all devices. +pub(crate) const XINPUT_ALL_DEVICES: xinput::DeviceId = 0; + +/// Value for DeviceId parameters which selects all device groups. Events that +/// occur within the group are emitted by the group itself. +/// +/// In XInput 2's interface, these are referred to as "master devices", but that +/// terminology is both archaic and unclear. +pub(crate) const XINPUT_ALL_DEVICE_GROUPS: xinput::DeviceId = 1; pub(crate) struct WindowRef { window: X11WindowStatePtr, @@ -117,6 +128,26 @@ pub struct Xdnd { position: Point, } +#[derive(Debug)] +struct PointerDeviceState { + horizontal: ScrollAxisState, + vertical: ScrollAxisState, +} + +#[derive(Debug, Default)] +struct ScrollAxisState { + /// Valuator number for looking up this axis's scroll value. + valuator_number: Option, + /// Conversion factor from scroll units to lines. + multiplier: f32, + /// Last scroll value for calculating scroll delta. + /// + /// This gets set to `None` whenever it might be invalid - when devices change or when window focus changes. + /// The logic errs on the side of invalidating this, since the consequence is just skipping the delta of one scroll event. + /// The consequence of not invalidating it can be large invalid deltas, which are much more user visible. 
+ scroll_value: Option, +} + pub struct X11ClientState { pub(crate) loop_handle: LoopHandle<'static, X11Client>, pub(crate) event_loop: Option>, @@ -152,9 +183,7 @@ pub struct X11ClientState { pub(crate) cursor_styles: HashMap, pub(crate) cursor_cache: HashMap, - pub(crate) scroll_class_data: Vec, - pub(crate) scroll_x: Option, - pub(crate) scroll_y: Option, + pointer_device_states: BTreeMap, pub(crate) common: LinuxCommon, pub(crate) clipboard: x11_clipboard::Clipboard, @@ -266,31 +295,21 @@ impl X11Client { .prefetch_extension_information(xinput::X11_EXTENSION_NAME) .unwrap(); + // Announce to X server that XInput up to 2.1 is supported. To increase this to 2.2 and + // beyond, support for touch events would need to be added. let xinput_version = xcb_connection - .xinput_xi_query_version(2, 0) + .xinput_xi_query_version(2, 1) .unwrap() .reply() .unwrap(); + // XInput 1.x is not supported. assert!( xinput_version.major_version >= 2, - "XInput Extension v2 not supported." + "XInput version >= 2 required." 
); - let master_device_query = xcb_connection - .xinput_xi_query_device(XINPUT_MASTER_DEVICE) - .unwrap() - .reply() - .unwrap(); - let scroll_class_data = master_device_query - .infos - .iter() - .find(|info| info.type_ == xinput::DeviceType::MASTER_POINTER) - .unwrap() - .classes - .iter() - .filter_map(|class| class.data.as_scroll()) - .map(|class| *class) - .collect::>(); + let pointer_device_states = + get_new_pointer_device_states(&xcb_connection, &BTreeMap::new()); let atoms = XcbAtoms::new(&xcb_connection).unwrap().reply().unwrap(); @@ -434,9 +453,7 @@ impl X11Client { cursor_styles: HashMap::default(), cursor_cache: HashMap::default(), - scroll_class_data, - scroll_x: None, - scroll_y: None, + pointer_device_states, clipboard, clipboard_item: None, @@ -950,35 +967,56 @@ impl X11Client { window.handle_ime_commit(text); state = self.0.borrow_mut(); } - if let Some(button) = button_of_key(event.detail.try_into().unwrap()) { - let click_elapsed = state.last_click.elapsed(); - - if click_elapsed < DOUBLE_CLICK_INTERVAL - && state - .last_mouse_button - .is_some_and(|prev_button| prev_button == button) - && is_within_click_distance(state.last_location, position) - { - state.current_count += 1; - } else { - state.current_count = 1; - } - - state.last_click = Instant::now(); - state.last_mouse_button = Some(button); - state.last_location = position; - let current_count = state.current_count; + match button_or_scroll_from_event_detail(event.detail) { + Some(ButtonOrScroll::Button(button)) => { + let click_elapsed = state.last_click.elapsed(); + if click_elapsed < DOUBLE_CLICK_INTERVAL + && state + .last_mouse_button + .is_some_and(|prev_button| prev_button == button) + && is_within_click_distance(state.last_location, position) + { + state.current_count += 1; + } else { + state.current_count = 1; + } - drop(state); - window.handle_input(PlatformInput::MouseDown(crate::MouseDownEvent { - button, - position, - modifiers, - click_count: current_count, - first_mouse: 
false, - })); - } else { - log::warn!("Unknown button press: {event:?}"); + state.last_click = Instant::now(); + state.last_mouse_button = Some(button); + state.last_location = position; + let current_count = state.current_count; + + drop(state); + window.handle_input(PlatformInput::MouseDown(crate::MouseDownEvent { + button, + position, + modifiers, + click_count: current_count, + first_mouse: false, + })); + } + Some(ButtonOrScroll::Scroll(direction)) => { + drop(state); + // Emulated scroll button presses are sent simultaneously with smooth scrolling XinputMotion events. + // Since handling those events does the scrolling, they are skipped here. + if !event + .flags + .contains(xinput::PointerEventFlags::POINTER_EMULATED) + { + let scroll_delta = match direction { + ScrollDirection::Up => Point::new(0.0, SCROLL_LINES), + ScrollDirection::Down => Point::new(0.0, -SCROLL_LINES), + ScrollDirection::Left => Point::new(SCROLL_LINES, 0.0), + ScrollDirection::Right => Point::new(-SCROLL_LINES, 0.0), + }; + window.handle_input(PlatformInput::ScrollWheel( + make_scroll_wheel_event(position, scroll_delta, modifiers), + )); + } + } + None => { + log::error!("Unknown x11 button: {}", event.detail); + } } } Event::XinputButtonRelease(event) => { @@ -991,15 +1029,19 @@ impl X11Client { px(event.event_x as f32 / u16::MAX as f32 / state.scale_factor), px(event.event_y as f32 / u16::MAX as f32 / state.scale_factor), ); - if let Some(button) = button_of_key(event.detail.try_into().unwrap()) { - let click_count = state.current_count; - drop(state); - window.handle_input(PlatformInput::MouseUp(crate::MouseUpEvent { - button, - position, - modifiers, - click_count, - })); + match button_or_scroll_from_event_detail(event.detail) { + Some(ButtonOrScroll::Button(button)) => { + let click_count = state.current_count; + drop(state); + window.handle_input(PlatformInput::MouseUp(crate::MouseUpEvent { + button, + position, + modifiers, + click_count, + })); + } + 
Some(ButtonOrScroll::Scroll(_)) => {} + None => {} } } Event::XinputMotion(event) => { @@ -1014,12 +1056,6 @@ impl X11Client { state.modifiers = modifiers; drop(state); - let axisvalues = event - .axisvalues - .iter() - .map(|axisvalue| fp3232_to_f32(*axisvalue)) - .collect::>(); - if event.valuator_mask[0] & 3 != 0 { window.handle_input(PlatformInput::MouseMove(crate::MouseMoveEvent { position, @@ -1028,64 +1064,17 @@ impl X11Client { })); } - let mut valuator_idx = 0; - let scroll_class_data = self.0.borrow().scroll_class_data.clone(); - for shift in 0..32 { - if (event.valuator_mask[0] >> shift) & 1 == 0 { - continue; - } - - for scroll_class in &scroll_class_data { - if scroll_class.scroll_type == xinput::ScrollType::HORIZONTAL - && scroll_class.number == shift - { - let new_scroll = axisvalues[valuator_idx] - / fp3232_to_f32(scroll_class.increment) - * SCROLL_LINES as f32; - let old_scroll = self.0.borrow().scroll_x; - self.0.borrow_mut().scroll_x = Some(new_scroll); - - if let Some(old_scroll) = old_scroll { - let delta_scroll = old_scroll - new_scroll; - window.handle_input(PlatformInput::ScrollWheel( - crate::ScrollWheelEvent { - position, - delta: ScrollDelta::Lines(Point::new(delta_scroll, 0.0)), - modifiers, - touch_phase: TouchPhase::default(), - }, - )); - } - } else if scroll_class.scroll_type == xinput::ScrollType::VERTICAL - && scroll_class.number == shift - { - // the `increment` is the valuator delta equivalent to one positive unit of scrolling. Here that means SCROLL_LINES lines. 
- let new_scroll = axisvalues[valuator_idx] - / fp3232_to_f32(scroll_class.increment) - * SCROLL_LINES as f32; - let old_scroll = self.0.borrow().scroll_y; - self.0.borrow_mut().scroll_y = Some(new_scroll); - - if let Some(old_scroll) = old_scroll { - let delta_scroll = old_scroll - new_scroll; - let (x, y) = if !modifiers.shift { - (0.0, delta_scroll) - } else { - (delta_scroll, 0.0) - }; - window.handle_input(PlatformInput::ScrollWheel( - crate::ScrollWheelEvent { - position, - delta: ScrollDelta::Lines(Point::new(x, y)), - modifiers, - touch_phase: TouchPhase::default(), - }, - )); - } - } + state = self.0.borrow_mut(); + if let Some(mut pointer) = state.pointer_device_states.get_mut(&event.sourceid) { + let scroll_delta = get_scroll_delta_and_update_state(&mut pointer, &event); + drop(state); + if let Some(scroll_delta) = scroll_delta { + window.handle_input(PlatformInput::ScrollWheel(make_scroll_wheel_event( + position, + scroll_delta, + modifiers, + ))); } - - valuator_idx += 1; } } Event::XinputEnter(event) if event.mode == xinput::NotifyMode::NORMAL => { @@ -1095,10 +1084,10 @@ impl X11Client { state.mouse_focused_window = Some(event.event); } Event::XinputLeave(event) if event.mode == xinput::NotifyMode::NORMAL => { - self.0.borrow_mut().scroll_x = None; // Set last scroll to `None` so that a large delta isn't created if scrolling is done outside the window (the valuator is global) - self.0.borrow_mut().scroll_y = None; - let mut state = self.0.borrow_mut(); + + // Set last scroll values to `None` so that a large delta isn't created if scrolling is done outside the window (the valuator is global) + reset_all_pointer_device_scroll_positions(&mut state.pointer_device_states); state.mouse_focused_window = None; let pressed_button = pressed_button_from_mask(event.buttons[0]); let position = point( @@ -1117,6 +1106,26 @@ impl X11Client { })); window.set_hovered(false); } + Event::XinputHierarchy(event) => { + let mut state = self.0.borrow_mut(); + // 
Temporarily use `state.pointer_device_states` to only store pointers that still have valid scroll values. + // Any change to a device invalidates its scroll values. + for info in event.infos { + if is_pointer_device(info.type_) { + state.pointer_device_states.remove(&info.deviceid); + } + } + state.pointer_device_states = get_new_pointer_device_states( + &state.xcb_connection, + &state.pointer_device_states, + ); + } + Event::XinputDeviceChanged(event) => { + let mut state = self.0.borrow_mut(); + if let Some(mut pointer) = state.pointer_device_states.get_mut(&event.sourceid) { + reset_pointer_device_scroll_positions(&mut pointer); + } + } _ => {} }; @@ -1742,3 +1751,142 @@ fn xdnd_send_status( .send_event(false, target, EventMask::default(), message) .unwrap(); } + +/// Recomputes `pointer_device_states` by querying all pointer devices. +/// When a device is present in `scroll_values_to_preserve`, its value for `ScrollAxisState.scroll_value` is used. +fn get_new_pointer_device_states( + xcb_connection: &XCBConnection, + scroll_values_to_preserve: &BTreeMap, +) -> BTreeMap { + let devices_query_result = xcb_connection + .xinput_xi_query_device(XINPUT_ALL_DEVICES) + .unwrap() + .reply() + .unwrap(); + + let mut pointer_device_states = BTreeMap::new(); + pointer_device_states.extend( + devices_query_result + .infos + .iter() + .filter(|info| is_pointer_device(info.type_)) + .filter_map(|info| { + let scroll_data = info + .classes + .iter() + .filter_map(|class| class.data.as_scroll()) + .map(|class| *class) + .rev() + .collect::>(); + let old_state = scroll_values_to_preserve.get(&info.deviceid); + let old_horizontal = old_state.map(|state| &state.horizontal); + let old_vertical = old_state.map(|state| &state.vertical); + let horizontal = scroll_data + .iter() + .find(|data| data.scroll_type == xinput::ScrollType::HORIZONTAL) + .map(|data| scroll_data_to_axis_state(data, old_horizontal)); + let vertical = scroll_data + .iter() + .find(|data| data.scroll_type == 
xinput::ScrollType::VERTICAL) + .map(|data| scroll_data_to_axis_state(data, old_vertical)); + if horizontal.is_none() && vertical.is_none() { + None + } else { + Some(( + info.deviceid, + PointerDeviceState { + horizontal: horizontal.unwrap_or_else(Default::default), + vertical: vertical.unwrap_or_else(Default::default), + }, + )) + } + }), + ); + if pointer_device_states.is_empty() { + log::error!("Found no xinput mouse pointers."); + } + return pointer_device_states; +} + +/// Returns true if the device is a pointer device. Does not include pointer device groups. +fn is_pointer_device(type_: xinput::DeviceType) -> bool { + type_ == xinput::DeviceType::SLAVE_POINTER +} + +fn scroll_data_to_axis_state( + data: &xinput::DeviceClassDataScroll, + old_axis_state_with_valid_scroll_value: Option<&ScrollAxisState>, +) -> ScrollAxisState { + ScrollAxisState { + valuator_number: Some(data.number), + multiplier: SCROLL_LINES / fp3232_to_f32(data.increment), + scroll_value: old_axis_state_with_valid_scroll_value.and_then(|state| state.scroll_value), + } +} + +fn reset_all_pointer_device_scroll_positions( + pointer_device_states: &mut BTreeMap, +) { + pointer_device_states + .iter_mut() + .for_each(|(_, device_state)| reset_pointer_device_scroll_positions(device_state)); +} + +fn reset_pointer_device_scroll_positions(pointer: &mut PointerDeviceState) { + pointer.horizontal.scroll_value = None; + pointer.vertical.scroll_value = None; +} + +/// Returns the scroll delta for a smooth scrolling motion event, or `None` if no scroll data is present. 
+fn get_scroll_delta_and_update_state( + pointer: &mut PointerDeviceState, + event: &xinput::MotionEvent, +) -> Option> { + let delta_x = get_axis_scroll_delta_and_update_state(event, &mut pointer.horizontal); + let delta_y = get_axis_scroll_delta_and_update_state(event, &mut pointer.vertical); + if delta_x.is_some() || delta_y.is_some() { + Some(Point::new(delta_x.unwrap_or(0.0), delta_y.unwrap_or(0.0))) + } else { + None + } +} + +fn get_axis_scroll_delta_and_update_state( + event: &xinput::MotionEvent, + axis: &mut ScrollAxisState, +) -> Option { + let axis_index = get_valuator_axis_index(&event.valuator_mask, axis.valuator_number?)?; + if let Some(axis_value) = event.axisvalues.get(axis_index) { + let new_scroll = fp3232_to_f32(*axis_value); + let delta_scroll = axis + .scroll_value + .map(|old_scroll| (old_scroll - new_scroll) * axis.multiplier); + axis.scroll_value = Some(new_scroll); + delta_scroll + } else { + log::error!("Encountered invalid XInput valuator_mask, scrolling may not work properly."); + None + } +} + +fn make_scroll_wheel_event( + position: Point, + scroll_delta: Point, + modifiers: Modifiers, +) -> crate::ScrollWheelEvent { + // When shift is held down, vertical scrolling turns into horizontal scrolling. 
+ let delta = if modifiers.shift { + Point { + x: scroll_delta.y, + y: 0.0, + } + } else { + scroll_delta + }; + crate::ScrollWheelEvent { + position, + delta: ScrollDelta::Lines(delta), + modifiers, + touch_phase: TouchPhase::default(), + } +} diff --git a/crates/gpui/src/platform/linux/x11/event.rs b/crates/gpui/src/platform/linux/x11/event.rs index 18ec392fc657ef..cd4cef24a33f33 100644 --- a/crates/gpui/src/platform/linux/x11/event.rs +++ b/crates/gpui/src/platform/linux/x11/event.rs @@ -5,13 +5,29 @@ use x11rb::protocol::{ use crate::{Modifiers, MouseButton, NavigationDirection}; -pub(crate) fn button_of_key(detail: xproto::Button) -> Option { +pub(crate) enum ButtonOrScroll { + Button(MouseButton), + Scroll(ScrollDirection), +} + +pub(crate) enum ScrollDirection { + Up, + Down, + Left, + Right, +} + +pub(crate) fn button_or_scroll_from_event_detail(detail: u32) -> Option { Some(match detail { - 1 => MouseButton::Left, - 2 => MouseButton::Middle, - 3 => MouseButton::Right, - 8 => MouseButton::Navigate(NavigationDirection::Back), - 9 => MouseButton::Navigate(NavigationDirection::Forward), + 1 => ButtonOrScroll::Button(MouseButton::Left), + 2 => ButtonOrScroll::Button(MouseButton::Middle), + 3 => ButtonOrScroll::Button(MouseButton::Right), + 4 => ButtonOrScroll::Scroll(ScrollDirection::Up), + 5 => ButtonOrScroll::Scroll(ScrollDirection::Down), + 6 => ButtonOrScroll::Scroll(ScrollDirection::Left), + 7 => ButtonOrScroll::Scroll(ScrollDirection::Right), + 8 => ButtonOrScroll::Button(MouseButton::Navigate(NavigationDirection::Back)), + 9 => ButtonOrScroll::Button(MouseButton::Navigate(NavigationDirection::Forward)), _ => return None, }) } @@ -48,3 +64,91 @@ pub(crate) fn pressed_button_from_mask(button_mask: u32) -> Option return None; }) } + +pub(crate) fn get_valuator_axis_index( + valuator_mask: &Vec, + valuator_number: u16, +) -> Option { + // XInput valuator masks have a 1 at the bit indexes corresponding to each + // valuator present in this event's axisvalues. 
Axisvalues is ordered from + // lowest valuator number to highest, so counting bits before the 1 bit for + // this valuator yields the index in axisvalues. + if bit_is_set_in_vec(&valuator_mask, valuator_number) { + Some(popcount_upto_bit_index(&valuator_mask, valuator_number) as usize) + } else { + None + } +} + +/// Returns the number of 1 bits in `bit_vec` for all bits where `i < bit_index`. +fn popcount_upto_bit_index(bit_vec: &Vec, bit_index: u16) -> u32 { + let array_index = bit_index as usize / 32; + let popcount: u32 = bit_vec + .get(array_index) + .map_or(0, |bits| keep_bits_upto(*bits, bit_index % 32).count_ones()); + if array_index == 0 { + popcount + } else { + // Valuator numbers over 32 probably never occur for scroll position, but may as well + // support it. + let leading_popcount: u32 = bit_vec + .iter() + .take(array_index) + .map(|bits| bits.count_ones()) + .sum(); + popcount + leading_popcount + } +} + +fn bit_is_set_in_vec(bit_vec: &Vec, bit_index: u16) -> bool { + let array_index = bit_index as usize / 32; + bit_vec + .get(array_index) + .map_or(false, |bits| bit_is_set(*bits, bit_index % 32)) +} + +fn bit_is_set(bits: u32, bit_index: u16) -> bool { + bits & (1 << bit_index) != 0 +} + +/// Sets every bit with `i >= bit_index` to 0. 
+fn keep_bits_upto(bits: u32, bit_index: u16) -> u32 { + if bit_index == 0 { + 0 + } else if bit_index >= 32 { + u32::MAX + } else { + bits & ((1 << bit_index) - 1) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_get_valuator_axis_index() { + assert!(get_valuator_axis_index(&vec![0b11], 0) == Some(0)); + assert!(get_valuator_axis_index(&vec![0b11], 1) == Some(1)); + assert!(get_valuator_axis_index(&vec![0b11], 2) == None); + + assert!(get_valuator_axis_index(&vec![0b100], 0) == None); + assert!(get_valuator_axis_index(&vec![0b100], 1) == None); + assert!(get_valuator_axis_index(&vec![0b100], 2) == Some(0)); + assert!(get_valuator_axis_index(&vec![0b100], 3) == None); + + assert!(get_valuator_axis_index(&vec![0b1010, 0], 0) == None); + assert!(get_valuator_axis_index(&vec![0b1010, 0], 1) == Some(0)); + assert!(get_valuator_axis_index(&vec![0b1010, 0], 2) == None); + assert!(get_valuator_axis_index(&vec![0b1010, 0], 3) == Some(1)); + + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 0) == None); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 1) == Some(0)); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 2) == None); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 3) == Some(1)); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 32) == Some(2)); + assert!(get_valuator_axis_index(&vec![0b1010, 0b1], 33) == None); + + assert!(get_valuator_axis_index(&vec![0b1010, 0b101], 34) == Some(3)); + } +} diff --git a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index 62b895d01f426c..2884c7ea91a51d 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -29,7 +29,7 @@ use std::{ sync::Arc, }; -use super::{X11Display, XINPUT_MASTER_DEVICE}; +use super::{X11Display, XINPUT_ALL_DEVICES, XINPUT_ALL_DEVICE_GROUPS}; x11rb::atom_manager! 
{ pub XcbAtoms: AtomsCookie { XA_ATOM, @@ -475,7 +475,7 @@ impl X11WindowState { .xinput_xi_select_events( x_window, &[xinput::EventMask { - deviceid: XINPUT_MASTER_DEVICE, + deviceid: XINPUT_ALL_DEVICE_GROUPS, mask: vec![ xinput::XIEventMask::MOTION | xinput::XIEventMask::BUTTON_PRESS @@ -487,6 +487,19 @@ impl X11WindowState { ) .unwrap(); + xcb_connection + .xinput_xi_select_events( + x_window, + &[xinput::EventMask { + deviceid: XINPUT_ALL_DEVICES, + mask: vec![ + xinput::XIEventMask::HIERARCHY, + xinput::XIEventMask::DEVICE_CHANGED, + ], + }], + ) + .unwrap(); + xcb_connection.flush().unwrap(); let raw = RawWindow { @@ -1253,7 +1266,7 @@ impl PlatformWindow for X11Window { self.0.x_window, state.atoms._GTK_SHOW_WINDOW_MENU, [ - XINPUT_MASTER_DEVICE as u32, + XINPUT_ALL_DEVICE_GROUPS as u32, coords.dst_x as u32, coords.dst_y as u32, 0, From 7ce8797d78794f5a53e7a7d113e4c14a65e6297f Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Tue, 1 Oct 2024 12:16:44 +0200 Subject: [PATCH 420/762] ssh remoting: Add infrastructure to handle reconnects (#18572) This restructures the code in `remote` so that it's easier to replace the current SSH connection with a new one in case of disconnects/reconnects. Right now, it successfully reconnects, BUT we're still missing the big piece on the server-side: keeping the server process alive and reconnecting to the same process that keeps the project-state. 
Release Notes: - N/A --------- Co-authored-by: Bennet --- .../remote_editing_collaboration_tests.rs | 4 +- crates/collab/src/tests/test_server.rs | 4 +- crates/project/src/project.rs | 83 +- crates/project/src/terminals.rs | 8 +- crates/recent_projects/src/ssh_connections.rs | 6 +- crates/remote/src/remote.rs | 2 +- crates/remote/src/ssh_session.rs | 826 +++++++++++------- crates/remote_server/src/headless_project.rs | 4 +- crates/remote_server/src/main.rs | 5 +- .../remote_server/src/remote_editing_tests.rs | 11 +- crates/workspace/src/workspace.rs | 4 +- 11 files changed, 559 insertions(+), 398 deletions(-) diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index a9cc32c1dd3a67..7de50511ea2766 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -4,7 +4,7 @@ use fs::{FakeFs, Fs as _}; use gpui::{Context as _, TestAppContext}; use language::language_settings::all_language_settings; use project::ProjectPath; -use remote::SshSession; +use remote::SshRemoteClient; use remote_server::HeadlessProject; use serde_json::json; use std::{path::Path, sync::Arc}; @@ -24,7 +24,7 @@ async fn test_sharing_an_ssh_remote_project( .await; // Set up project on remote FS - let (client_ssh, server_ssh) = SshSession::fake(cx_a, server_cx); + let (client_ssh, server_ssh) = SshRemoteClient::fake(cx_a, server_cx); let remote_fs = FakeFs::new(server_cx.executor()); remote_fs .insert_tree( diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 5ff4a720741bc8..5e7d935c364382 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -25,7 +25,7 @@ use node_runtime::NodeRuntime; use notifications::NotificationStore; use parking_lot::Mutex; use project::{Project, WorktreeId}; -use remote::SshSession; +use remote::SshRemoteClient; 
use rpc::{ proto::{self, ChannelRole}, RECEIVE_TIMEOUT, @@ -835,7 +835,7 @@ impl TestClient { pub async fn build_ssh_project( &self, root_path: impl AsRef, - ssh: Arc, + ssh: Arc, cx: &mut TestAppContext, ) -> (Model, WorktreeId) { let project = cx.update(|cx| { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b91250e6b2c4a3..dadbd394bbf9b5 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -54,7 +54,7 @@ use parking_lot::{Mutex, RwLock}; use paths::{local_tasks_file_relative_path, local_vscode_tasks_file_relative_path}; pub use prettier_store::PrettierStore; use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent}; -use remote::SshSession; +use remote::SshRemoteClient; use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient, ErrorCode}; use search::{SearchInputKind, SearchQuery, SearchResult}; use search_history::SearchHistory; @@ -138,7 +138,7 @@ pub struct Project { join_project_response_message_id: u32, user_store: Model, fs: Arc, - ssh_session: Option>, + ssh_client: Option>, client_state: ProjectClientState, collaborators: HashMap, client_subscriptions: Vec, @@ -643,7 +643,7 @@ impl Project { user_store, settings_observer, fs, - ssh_session: None, + ssh_client: None, buffers_needing_diff: Default::default(), git_diff_debouncer: DebouncedDelay::new(), terminals: Terminals { @@ -664,7 +664,7 @@ impl Project { } pub fn ssh( - ssh: Arc, + ssh: Arc, client: Arc, node: NodeRuntime, user_store: Model, @@ -682,14 +682,14 @@ impl Project { SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); let worktree_store = - cx.new_model(|_| WorktreeStore::remote(false, ssh.clone().into(), 0, None)); + cx.new_model(|_| WorktreeStore::remote(false, ssh.to_proto_client(), 0, None)); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); let buffer_store = cx.new_model(|cx| { BufferStore::remote( worktree_store.clone(), - ssh.clone().into(), + 
ssh.to_proto_client(), SSH_PROJECT_ID, cx, ) @@ -698,7 +698,7 @@ impl Project { .detach(); let settings_observer = cx.new_model(|cx| { - SettingsObserver::new_ssh(ssh.clone().into(), worktree_store.clone(), cx) + SettingsObserver::new_ssh(ssh.to_proto_client(), worktree_store.clone(), cx) }); cx.subscribe(&settings_observer, Self::on_settings_observer_event) .detach(); @@ -709,7 +709,7 @@ impl Project { buffer_store.clone(), worktree_store.clone(), languages.clone(), - ssh.clone().into(), + ssh.to_proto_client(), SSH_PROJECT_ID, cx, ) @@ -733,7 +733,7 @@ impl Project { user_store, settings_observer, fs, - ssh_session: Some(ssh.clone()), + ssh_client: Some(ssh.clone()), buffers_needing_diff: Default::default(), git_diff_debouncer: DebouncedDelay::new(), terminals: Terminals { @@ -751,7 +751,7 @@ impl Project { search_excluded_history: Self::new_search_history(), }; - let client: AnyProtoClient = ssh.clone().into(); + let client: AnyProtoClient = ssh.to_proto_client(); ssh.subscribe_to_entity(SSH_PROJECT_ID, &cx.handle()); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.buffer_store); @@ -907,7 +907,7 @@ impl Project { user_store: user_store.clone(), snippets, fs, - ssh_session: None, + ssh_client: None, settings_observer: settings_observer.clone(), client_subscriptions: Default::default(), _subscriptions: vec![cx.on_release(Self::release)], @@ -1230,7 +1230,7 @@ impl Project { match self.client_state { ProjectClientState::Remote { replica_id, .. } => replica_id, _ => { - if self.ssh_session.is_some() { + if self.ssh_client.is_some() { 1 } else { 0 @@ -1638,7 +1638,7 @@ impl Project { pub fn is_local(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. } => { - self.ssh_session.is_none() + self.ssh_client.is_none() } ProjectClientState::Remote { .. } => false, } @@ -1647,7 +1647,7 @@ impl Project { pub fn is_via_ssh(&self) -> bool { match &self.client_state { ProjectClientState::Local | ProjectClientState::Shared { .. 
} => { - self.ssh_session.is_some() + self.ssh_client.is_some() } ProjectClientState::Remote { .. } => false, } @@ -1933,8 +1933,9 @@ impl Project { } BufferStoreEvent::BufferChangedFilePath { .. } => {} BufferStoreEvent::BufferDropped(buffer_id) => { - if let Some(ref ssh_session) = self.ssh_session { - ssh_session + if let Some(ref ssh_client) = self.ssh_client { + ssh_client + .to_proto_client() .send(proto::CloseBuffer { project_id: 0, buffer_id: buffer_id.to_proto(), @@ -2139,13 +2140,14 @@ impl Project { } => { let operation = language::proto::serialize_operation(operation); - if let Some(ssh) = &self.ssh_session { - ssh.send(proto::UpdateBuffer { - project_id: 0, - buffer_id: buffer_id.to_proto(), - operations: vec![operation.clone()], - }) - .ok(); + if let Some(ssh) = &self.ssh_client { + ssh.to_proto_client() + .send(proto::UpdateBuffer { + project_id: 0, + buffer_id: buffer_id.to_proto(), + operations: vec![operation.clone()], + }) + .ok(); } self.enqueue_buffer_ordered_message(BufferOrderedMessage::Operation { @@ -2825,14 +2827,13 @@ impl Project { ) -> Receiver> { let (tx, rx) = smol::channel::unbounded(); - let (client, remote_id): (AnyProtoClient, _) = - if let Some(ssh_session) = self.ssh_session.clone() { - (ssh_session.into(), 0) - } else if let Some(remote_id) = self.remote_id() { - (self.client.clone().into(), remote_id) - } else { - return rx; - }; + let (client, remote_id): (AnyProtoClient, _) = if let Some(ssh_client) = &self.ssh_client { + (ssh_client.to_proto_client(), 0) + } else if let Some(remote_id) = self.remote_id() { + (self.client.clone().into(), remote_id) + } else { + return rx; + }; let request = client.request(proto::FindSearchCandidates { project_id: remote_id, @@ -2961,11 +2962,13 @@ impl Project { exists.then(|| ResolvedPath::AbsPath(expanded)) }) - } else if let Some(ssh_session) = self.ssh_session.as_ref() { - let request = ssh_session.request(proto::CheckFileExists { - project_id: SSH_PROJECT_ID, - path: path.to_string(), 
- }); + } else if let Some(ssh_client) = self.ssh_client.as_ref() { + let request = ssh_client + .to_proto_client() + .request(proto::CheckFileExists { + project_id: SSH_PROJECT_ID, + path: path.to_string(), + }); cx.background_executor().spawn(async move { let response = request.await.log_err()?; if response.exists { @@ -3035,13 +3038,13 @@ impl Project { ) -> Task>> { if self.is_local() { DirectoryLister::Local(self.fs.clone()).list_directory(query, cx) - } else if let Some(session) = self.ssh_session.as_ref() { + } else if let Some(session) = self.ssh_client.as_ref() { let request = proto::ListRemoteDirectory { dev_server_id: SSH_PROJECT_ID, path: query, }; - let response = session.request(request); + let response = session.to_proto_client().request(request); cx.background_executor().spawn(async move { let response = response.await?; Ok(response.entries.into_iter().map(PathBuf::from).collect()) @@ -3465,11 +3468,11 @@ impl Project { cx: AsyncAppContext, ) -> Result { let buffer_store = this.read_with(&cx, |this, cx| { - if let Some(ssh) = &this.ssh_session { + if let Some(ssh) = &this.ssh_client { let mut payload = envelope.payload.clone(); payload.project_id = 0; cx.background_executor() - .spawn(ssh.request(payload)) + .spawn(ssh.to_proto_client().request(payload)) .detach_and_log_err(cx); } this.buffer_store.clone() diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 54dd48cf433ff3..7175b75e22a323 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -67,8 +67,12 @@ impl Project { } fn ssh_command(&self, cx: &AppContext) -> Option { - if let Some(ssh_session) = self.ssh_session.as_ref() { - return Some(SshCommand::Direct(ssh_session.ssh_args())); + if let Some(args) = self + .ssh_client + .as_ref() + .and_then(|session| session.ssh_args()) + { + return Some(SshCommand::Direct(args)); } let dev_server_project_id = self.dev_server_project_id()?; diff --git 
a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index dd30f15f267fc7..d0fffc031f0bff 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -11,7 +11,7 @@ use gpui::{ Transformation, View, }; use release_channel::{AppVersion, ReleaseChannel}; -use remote::{SshConnectionOptions, SshPlatform, SshSession}; +use remote::{SshConnectionOptions, SshPlatform, SshRemoteClient}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; @@ -376,12 +376,12 @@ pub fn connect_over_ssh( connection_options: SshConnectionOptions, ui: View, cx: &mut WindowContext, -) -> Task>> { +) -> Task>> { let window = cx.window_handle(); let known_password = connection_options.password.clone(); cx.spawn(|mut cx| async move { - remote::SshSession::client( + remote::SshRemoteClient::new( connection_options, Arc::new(SshClientDelegate { window, diff --git a/crates/remote/src/remote.rs b/crates/remote/src/remote.rs index 23f798c1914dbf..c3d9e8f9cc125e 100644 --- a/crates/remote/src/remote.rs +++ b/crates/remote/src/remote.rs @@ -2,4 +2,4 @@ pub mod json_log; pub mod protocol; pub mod ssh_session; -pub use ssh_session::{SshClientDelegate, SshConnectionOptions, SshPlatform, SshSession}; +pub use ssh_session::{SshClientDelegate, SshConnectionOptions, SshPlatform, SshRemoteClient}; diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 915595fd9d2957..fe1e42fe966309 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -7,19 +7,23 @@ use crate::{ use anyhow::{anyhow, Context as _, Result}; use collections::HashMap; use futures::{ - channel::{mpsc, oneshot}, + channel::{ + mpsc::{self, UnboundedReceiver, UnboundedSender}, + oneshot, + }, future::BoxFuture, - select_biased, AsyncReadExt as _, AsyncWriteExt as _, Future, FutureExt as _, StreamExt as _, + select_biased, AsyncReadExt as _, 
AsyncWriteExt as _, Future, FutureExt as _, SinkExt, + StreamExt as _, }; use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion, Task}; use parking_lot::Mutex; use rpc::{ proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage}, - EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet, RpcError, + AnyProtoClient, EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet, RpcError, }; use smol::{ fs, - process::{self, Stdio}, + process::{self, Child, Stdio}, }; use std::{ any::TypeId, @@ -44,22 +48,6 @@ pub struct SshSocket { socket_path: PathBuf, } -pub struct SshSession { - next_message_id: AtomicU32, - response_channels: ResponseChannels, // Lock - outgoing_tx: mpsc::UnboundedSender, - spawn_process_tx: mpsc::UnboundedSender, - client_socket: Option, - state: Mutex, // Lock - _io_task: Option>>, -} - -struct SshClientState { - socket: SshSocket, - master_process: process::Child, - _temp_dir: TempDir, -} - #[derive(Debug, Clone, PartialEq, Eq)] pub struct SshConnectionOptions { pub host: String, @@ -105,18 +93,13 @@ impl SshConnectionOptions { } } -struct SpawnRequest { - command: String, - process_tx: oneshot::Sender, -} - #[derive(Copy, Clone, Debug)] pub struct SshPlatform { pub os: &'static str, pub arch: &'static str, } -pub trait SshClientDelegate { +pub trait SshClientDelegate: Send + Sync { fn ask_password( &self, prompt: String, @@ -132,48 +115,249 @@ pub trait SshClientDelegate { fn set_error(&self, error_message: String, cx: &mut AsyncAppContext); } -type ResponseChannels = Mutex)>>>; +impl SshSocket { + fn ssh_command>(&self, program: S) -> process::Command { + let mut command = process::Command::new("ssh"); + self.ssh_options(&mut command) + .arg(self.connection_options.ssh_url()) + .arg(program); + command + } + + fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command { + command + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + 
.args(["-o", "ControlMaster=no", "-o"]) + .arg(format!("ControlPath={}", self.socket_path.display())) + } + + fn ssh_args(&self) -> Vec { + vec![ + "-o".to_string(), + "ControlMaster=no".to_string(), + "-o".to_string(), + format!("ControlPath={}", self.socket_path.display()), + self.connection_options.ssh_url(), + ] + } +} -impl SshSession { - pub async fn client( +async fn run_cmd(command: &mut process::Command) -> Result { + let output = command.output().await?; + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + Err(anyhow!( + "failed to run command: {}", + String::from_utf8_lossy(&output.stderr) + )) + } +} +#[cfg(unix)] +async fn read_with_timeout( + stdout: &mut process::ChildStdout, + timeout: std::time::Duration, + output: &mut Vec, +) -> Result<(), std::io::Error> { + smol::future::or( + async { + stdout.read_to_end(output).await?; + Ok::<_, std::io::Error>(()) + }, + async { + smol::Timer::after(timeout).await; + + Err(std::io::Error::new( + std::io::ErrorKind::TimedOut, + "Read operation timed out", + )) + }, + ) + .await +} + +struct ChannelForwarder { + quit_tx: UnboundedSender<()>, + forwarding_task: Task<(UnboundedSender, UnboundedReceiver)>, +} + +impl ChannelForwarder { + fn new( + mut incoming_tx: UnboundedSender, + mut outgoing_rx: UnboundedReceiver, + cx: &mut AsyncAppContext, + ) -> (Self, UnboundedSender, UnboundedReceiver) { + let (quit_tx, mut quit_rx) = mpsc::unbounded::<()>(); + + let (proxy_incoming_tx, mut proxy_incoming_rx) = mpsc::unbounded::(); + let (mut proxy_outgoing_tx, proxy_outgoing_rx) = mpsc::unbounded::(); + + let forwarding_task = cx.background_executor().spawn(async move { + loop { + select_biased! 
{ + _ = quit_rx.next().fuse() => { + break; + }, + incoming_envelope = proxy_incoming_rx.next().fuse() => { + if let Some(envelope) = incoming_envelope { + if incoming_tx.send(envelope).await.is_err() { + break; + } + } else { + break; + } + } + outgoing_envelope = outgoing_rx.next().fuse() => { + if let Some(envelope) = outgoing_envelope { + if proxy_outgoing_tx.send(envelope).await.is_err() { + break; + } + } else { + break; + } + } + } + } + + (incoming_tx, outgoing_rx) + }); + + ( + Self { + forwarding_task, + quit_tx, + }, + proxy_incoming_tx, + proxy_outgoing_rx, + ) + } + + async fn into_channels(mut self) -> (UnboundedSender, UnboundedReceiver) { + let _ = self.quit_tx.send(()).await; + self.forwarding_task.await + } +} + +struct SshRemoteClientState { + ssh_connection: SshRemoteConnection, + delegate: Arc, + forwarder: ChannelForwarder, + _multiplex_task: Task>, +} + +pub struct SshRemoteClient { + client: Arc, + inner_state: Arc>>, +} + +impl SshRemoteClient { + pub async fn new( connection_options: SshConnectionOptions, delegate: Arc, cx: &mut AsyncAppContext, ) -> Result> { - let client_state = SshClientState::new(connection_options, delegate.clone(), cx).await?; + let (outgoing_tx, outgoing_rx) = mpsc::unbounded::(); + let (incoming_tx, incoming_rx) = mpsc::unbounded::(); - let platform = client_state.query_platform().await?; - let (local_binary_path, version) = delegate.get_server_binary(platform, cx).await??; - let remote_binary_path = delegate.remote_server_binary_path(cx)?; - client_state - .ensure_server_binary( - &delegate, - &local_binary_path, - &remote_binary_path, - version, + let client = cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx))?; + let this = Arc::new(Self { + client, + inner_state: Arc::new(Mutex::new(None)), + }); + + let inner_state = { + let (proxy, proxy_incoming_tx, proxy_outgoing_rx) = + ChannelForwarder::new(incoming_tx, outgoing_rx, cx); + + let (ssh_connection, ssh_process) = + 
Self::establish_connection(connection_options.clone(), delegate.clone(), cx) + .await?; + + let multiplex_task = Self::multiplex( + this.clone(), + ssh_process, + proxy_incoming_tx, + proxy_outgoing_rx, cx, - ) - .await?; + ); - let (spawn_process_tx, mut spawn_process_rx) = mpsc::unbounded::(); - let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded::(); - let (incoming_tx, incoming_rx) = mpsc::unbounded::(); + SshRemoteClientState { + ssh_connection, + delegate, + forwarder: proxy, + _multiplex_task: multiplex_task, + } + }; - let socket = client_state.socket.clone(); - run_cmd(socket.ssh_command(&remote_binary_path).arg("version")).await?; + this.inner_state.lock().replace(inner_state); - let mut remote_server_child = socket - .ssh_command(format!( - "RUST_LOG={} RUST_BACKTRACE={} {:?} run", - std::env::var("RUST_LOG").unwrap_or_default(), - std::env::var("RUST_BACKTRACE").unwrap_or_default(), - remote_binary_path, - )) - .spawn() - .context("failed to spawn remote server")?; - let mut child_stderr = remote_server_child.stderr.take().unwrap(); - let mut child_stdout = remote_server_child.stdout.take().unwrap(); - let mut child_stdin = remote_server_child.stdin.take().unwrap(); + Ok(this) + } + + fn reconnect(this: Arc, cx: &mut AsyncAppContext) -> Result<()> { + let Some(state) = this.inner_state.lock().take() else { + return Err(anyhow!("reconnect is already in progress")); + }; + + let SshRemoteClientState { + mut ssh_connection, + delegate, + forwarder: proxy, + _multiplex_task, + } = state; + drop(_multiplex_task); + + cx.spawn(|mut cx| async move { + let (incoming_tx, outgoing_rx) = proxy.into_channels().await; + + ssh_connection.master_process.kill()?; + ssh_connection + .master_process + .status() + .await + .context("Failed to kill ssh process")?; + + let connection_options = ssh_connection.socket.connection_options.clone(); + + let (ssh_connection, ssh_process) = + Self::establish_connection(connection_options, delegate.clone(), &mut cx).await?; + + let 
(proxy, proxy_incoming_tx, proxy_outgoing_rx) = + ChannelForwarder::new(incoming_tx, outgoing_rx, &mut cx); + + let inner_state = SshRemoteClientState { + ssh_connection, + delegate, + forwarder: proxy, + _multiplex_task: Self::multiplex( + this.clone(), + ssh_process, + proxy_incoming_tx, + proxy_outgoing_rx, + &mut cx, + ), + }; + this.inner_state.lock().replace(inner_state); + + anyhow::Ok(()) + }) + .detach(); + + anyhow::Ok(()) + } + + fn multiplex( + this: Arc, + mut ssh_process: Child, + incoming_tx: UnboundedSender, + mut outgoing_rx: UnboundedReceiver, + cx: &mut AsyncAppContext, + ) -> Task> { + let mut child_stderr = ssh_process.stderr.take().unwrap(); + let mut child_stdout = ssh_process.stdout.take().unwrap(); + let mut child_stdin = ssh_process.stdin.take().unwrap(); let io_task = cx.background_executor().spawn(async move { let mut stdin_buffer = Vec::new(); @@ -194,27 +378,15 @@ impl SshSession { write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?; } - request = spawn_process_rx.next().fuse() => { - let Some(request) = request else { - return Ok(()); - }; - - log::info!("spawn process: {:?}", request.command); - let child = client_state.socket - .ssh_command(&request.command) - .spawn() - .context("failed to create channel")?; - request.process_tx.send(child).ok(); - } - result = child_stdout.read(&mut stdout_buffer).fuse() => { match result { Ok(0) => { child_stdin.close().await?; outgoing_rx.close(); - let status = remote_server_child.status().await?; + let status = ssh_process.status().await?; if !status.success() { - log::error!("channel exited with status: {status:?}"); + log::error!("ssh process exited with status: {status:?}"); + return Err(anyhow!("ssh process exited with non-zero status code: {:?}", status.code())); } return Ok(()); } @@ -267,239 +439,112 @@ impl SshSession { } }); - cx.update(|cx| { - Self::new( - incoming_rx, - outgoing_tx, - spawn_process_tx, - Some(socket), - Some(io_task), - cx, - ) - }) - } + 
cx.spawn(|mut cx| async move { + let result = io_task.await; - pub fn server( - incoming_rx: mpsc::UnboundedReceiver, - outgoing_tx: mpsc::UnboundedSender, - cx: &AppContext, - ) -> Arc { - let (tx, _rx) = mpsc::unbounded(); - Self::new(incoming_rx, outgoing_tx, tx, None, None, cx) - } - - #[cfg(any(test, feature = "test-support"))] - pub fn fake( - client_cx: &mut gpui::TestAppContext, - server_cx: &mut gpui::TestAppContext, - ) -> (Arc, Arc) { - let (server_to_client_tx, server_to_client_rx) = mpsc::unbounded(); - let (client_to_server_tx, client_to_server_rx) = mpsc::unbounded(); - let (tx, _rx) = mpsc::unbounded(); - ( - client_cx.update(|cx| { - Self::new( - server_to_client_rx, - client_to_server_tx, - tx.clone(), - None, // todo() - None, - cx, - ) - }), - server_cx.update(|cx| { - Self::new( - client_to_server_rx, - server_to_client_tx, - tx.clone(), - None, - None, - cx, - ) - }), - ) - } - - fn new( - mut incoming_rx: mpsc::UnboundedReceiver, - outgoing_tx: mpsc::UnboundedSender, - spawn_process_tx: mpsc::UnboundedSender, - client_socket: Option, - io_task: Option>>, - cx: &AppContext, - ) -> Arc { - let this = Arc::new(Self { - next_message_id: AtomicU32::new(0), - response_channels: ResponseChannels::default(), - outgoing_tx, - spawn_process_tx, - client_socket, - state: Default::default(), - _io_task: io_task, - }); - - cx.spawn(|cx| { - let this = Arc::downgrade(&this); - async move { - let peer_id = PeerId { owner_id: 0, id: 0 }; - while let Some(incoming) = incoming_rx.next().await { - let Some(this) = this.upgrade() else { - return anyhow::Ok(()); - }; - - if let Some(request_id) = incoming.responding_to { - let request_id = MessageId(request_id); - let sender = this.response_channels.lock().remove(&request_id); - if let Some(sender) = sender { - let (tx, rx) = oneshot::channel(); - if incoming.payload.is_some() { - sender.send((incoming, tx)).ok(); - } - rx.await.ok(); - } - } else if let Some(envelope) = - build_typed_envelope(peer_id, 
Instant::now(), incoming) - { - let type_name = envelope.payload_type_name(); - if let Some(future) = ProtoMessageHandlerSet::handle_message( - &this.state, - envelope, - this.clone().into(), - cx.clone(), - ) { - log::debug!("ssh message received. name:{type_name}"); - match future.await { - Ok(_) => { - log::debug!("ssh message handled. name:{type_name}"); - } - Err(error) => { - log::error!( - "error handling message. type:{type_name}, error:{error}", - ); - } - } - } else { - log::error!("unhandled ssh message name:{type_name}"); - } - } - } - anyhow::Ok(()) + if let Err(error) = result { + log::warn!("ssh io task died with error: {:?}. reconnecting...", error); + Self::reconnect(this, &mut cx).ok(); } - }) - .detach(); - - this - } - pub fn request( - &self, - payload: T, - ) -> impl 'static + Future> { - log::debug!("ssh request start. name:{}", T::NAME); - let response = self.request_dynamic(payload.into_envelope(0, None, None), T::NAME); - async move { - let response = response.await?; - log::debug!("ssh request finish. 
name:{}", T::NAME); - T::Response::from_envelope(response) - .ok_or_else(|| anyhow!("received a response of the wrong type")) - } - } - - pub fn send(&self, payload: T) -> Result<()> { - log::debug!("ssh send name:{}", T::NAME); - self.send_dynamic(payload.into_envelope(0, None, None)) + Ok(()) + }) } - pub fn request_dynamic( - &self, - mut envelope: proto::Envelope, - type_name: &'static str, - ) -> impl 'static + Future> { - envelope.id = self.next_message_id.fetch_add(1, SeqCst); - let (tx, rx) = oneshot::channel(); - let mut response_channels_lock = self.response_channels.lock(); - response_channels_lock.insert(MessageId(envelope.id), tx); - drop(response_channels_lock); - let result = self.outgoing_tx.unbounded_send(envelope); - async move { - if let Err(error) = &result { - log::error!("failed to send message: {}", error); - return Err(anyhow!("failed to send message: {}", error)); - } - - let response = rx.await.context("connection lost")?.0; - if let Some(proto::envelope::Payload::Error(error)) = &response.payload { - return Err(RpcError::from_proto(error, type_name)); - } - Ok(response) - } - } + async fn establish_connection( + connection_options: SshConnectionOptions, + delegate: Arc, + cx: &mut AsyncAppContext, + ) -> Result<(SshRemoteConnection, Child)> { + let ssh_connection = + SshRemoteConnection::new(connection_options, delegate.clone(), cx).await?; - pub fn send_dynamic(&self, mut envelope: proto::Envelope) -> Result<()> { - envelope.id = self.next_message_id.fetch_add(1, SeqCst); - self.outgoing_tx.unbounded_send(envelope)?; - Ok(()) - } + let platform = ssh_connection.query_platform().await?; + let (local_binary_path, version) = delegate.get_server_binary(platform, cx).await??; + let remote_binary_path = delegate.remote_server_binary_path(cx)?; + ssh_connection + .ensure_server_binary( + &delegate, + &local_binary_path, + &remote_binary_path, + version, + cx, + ) + .await?; - pub fn subscribe_to_entity(&self, remote_id: u64, entity: &Model) { - 
let id = (TypeId::of::(), remote_id); + let socket = ssh_connection.socket.clone(); + run_cmd(socket.ssh_command(&remote_binary_path).arg("version")).await?; - let mut state = self.state.lock(); - if state.entities_by_type_and_remote_id.contains_key(&id) { - panic!("already subscribed to entity"); - } + let ssh_process = socket + .ssh_command(format!( + "RUST_LOG={} RUST_BACKTRACE={} {:?} run", + std::env::var("RUST_LOG").unwrap_or_default(), + std::env::var("RUST_BACKTRACE").unwrap_or_default(), + remote_binary_path, + )) + .spawn() + .context("failed to spawn remote server")?; - state.entities_by_type_and_remote_id.insert( - id, - EntityMessageSubscriber::Entity { - handle: entity.downgrade().into(), - }, - ); + Ok((ssh_connection, ssh_process)) } - pub async fn spawn_process(&self, command: String) -> process::Child { - let (process_tx, process_rx) = oneshot::channel(); - self.spawn_process_tx - .unbounded_send(SpawnRequest { - command, - process_tx, - }) - .ok(); - process_rx.await.unwrap() + pub fn subscribe_to_entity(&self, remote_id: u64, entity: &Model) { + self.client.subscribe_to_entity(remote_id, entity); } - pub fn ssh_args(&self) -> Vec { - self.client_socket.as_ref().unwrap().ssh_args() + pub fn ssh_args(&self) -> Option> { + let state = self.inner_state.lock(); + state + .as_ref() + .map(|state| state.ssh_connection.socket.ssh_args()) } -} -impl ProtoClient for SshSession { - fn request( - &self, - envelope: proto::Envelope, - request_type: &'static str, - ) -> BoxFuture<'static, Result> { - self.request_dynamic(envelope, request_type).boxed() + pub fn to_proto_client(&self) -> AnyProtoClient { + self.client.clone().into() } - fn send(&self, envelope: proto::Envelope, _message_type: &'static str) -> Result<()> { - self.send_dynamic(envelope) - } + #[cfg(any(test, feature = "test-support"))] + pub fn fake( + client_cx: &mut gpui::TestAppContext, + server_cx: &mut gpui::TestAppContext, + ) -> (Arc, Arc) { + let (server_to_client_tx, 
server_to_client_rx) = mpsc::unbounded(); + let (client_to_server_tx, client_to_server_rx) = mpsc::unbounded(); - fn send_response(&self, envelope: Envelope, _message_type: &'static str) -> anyhow::Result<()> { - self.send_dynamic(envelope) + ( + client_cx.update(|cx| { + let client = ChannelClient::new(server_to_client_rx, client_to_server_tx, cx); + Arc::new(Self { + client, + inner_state: Arc::new(Mutex::new(None)), + }) + }), + server_cx.update(|cx| ChannelClient::new(client_to_server_rx, server_to_client_tx, cx)), + ) } +} - fn message_handler_set(&self) -> &Mutex { - &self.state +impl From for AnyProtoClient { + fn from(client: SshRemoteClient) -> Self { + AnyProtoClient::new(client.client.clone()) } +} - fn is_via_collab(&self) -> bool { - false +struct SshRemoteConnection { + socket: SshSocket, + master_process: process::Child, + _temp_dir: TempDir, +} + +impl Drop for SshRemoteConnection { + fn drop(&mut self) { + if let Err(error) = self.master_process.kill() { + log::error!("failed to kill SSH master process: {}", error); + } } } -impl SshClientState { +impl SshRemoteConnection { #[cfg(not(unix))] async fn new( _connection_options: SshConnectionOptions, @@ -740,74 +785,181 @@ impl SshClientState { } } -#[cfg(unix)] -async fn read_with_timeout( - stdout: &mut process::ChildStdout, - timeout: std::time::Duration, - output: &mut Vec, -) -> Result<(), std::io::Error> { - smol::future::or( - async { - stdout.read_to_end(output).await?; - Ok::<_, std::io::Error>(()) - }, - async { - smol::Timer::after(timeout).await; +type ResponseChannels = Mutex)>>>; - Err(std::io::Error::new( - std::io::ErrorKind::TimedOut, - "Read operation timed out", - )) - }, - ) - .await +pub struct ChannelClient { + next_message_id: AtomicU32, + outgoing_tx: mpsc::UnboundedSender, + response_channels: ResponseChannels, // Lock + message_handlers: Mutex, // Lock } -impl Drop for SshClientState { - fn drop(&mut self) { - if let Err(error) = self.master_process.kill() { - 
log::error!("failed to kill SSH master process: {}", error); +impl ChannelClient { + pub fn new( + incoming_rx: mpsc::UnboundedReceiver, + outgoing_tx: mpsc::UnboundedSender, + cx: &AppContext, + ) -> Arc { + let this = Arc::new(Self { + outgoing_tx, + next_message_id: AtomicU32::new(0), + response_channels: ResponseChannels::default(), + message_handlers: Default::default(), + }); + + Self::start_handling_messages(this.clone(), incoming_rx, cx); + + this + } + + fn start_handling_messages( + this: Arc, + mut incoming_rx: mpsc::UnboundedReceiver, + cx: &AppContext, + ) { + cx.spawn(|cx| { + let this = Arc::downgrade(&this); + async move { + let peer_id = PeerId { owner_id: 0, id: 0 }; + while let Some(incoming) = incoming_rx.next().await { + let Some(this) = this.upgrade() else { + return anyhow::Ok(()); + }; + + if let Some(request_id) = incoming.responding_to { + let request_id = MessageId(request_id); + let sender = this.response_channels.lock().remove(&request_id); + if let Some(sender) = sender { + let (tx, rx) = oneshot::channel(); + if incoming.payload.is_some() { + sender.send((incoming, tx)).ok(); + } + rx.await.ok(); + } + } else if let Some(envelope) = + build_typed_envelope(peer_id, Instant::now(), incoming) + { + let type_name = envelope.payload_type_name(); + if let Some(future) = ProtoMessageHandlerSet::handle_message( + &this.message_handlers, + envelope, + this.clone().into(), + cx.clone(), + ) { + log::debug!("ssh message received. name:{type_name}"); + match future.await { + Ok(_) => { + log::debug!("ssh message handled. name:{type_name}"); + } + Err(error) => { + log::error!( + "error handling message. 
type:{type_name}, error:{error}", + ); + } + } + } else { + log::error!("unhandled ssh message name:{type_name}"); + } + } + } + anyhow::Ok(()) + } + }) + .detach(); + } + + pub fn subscribe_to_entity(&self, remote_id: u64, entity: &Model) { + let id = (TypeId::of::(), remote_id); + + let mut message_handlers = self.message_handlers.lock(); + if message_handlers + .entities_by_type_and_remote_id + .contains_key(&id) + { + panic!("already subscribed to entity"); } + + message_handlers.entities_by_type_and_remote_id.insert( + id, + EntityMessageSubscriber::Entity { + handle: entity.downgrade().into(), + }, + ); } -} -impl SshSocket { - fn ssh_command>(&self, program: S) -> process::Command { - let mut command = process::Command::new("ssh"); - self.ssh_options(&mut command) - .arg(self.connection_options.ssh_url()) - .arg(program); - command + pub fn request( + &self, + payload: T, + ) -> impl 'static + Future> { + log::debug!("ssh request start. name:{}", T::NAME); + let response = self.request_dynamic(payload.into_envelope(0, None, None), T::NAME); + async move { + let response = response.await?; + log::debug!("ssh request finish. 
name:{}", T::NAME); + T::Response::from_envelope(response) + .ok_or_else(|| anyhow!("received a response of the wrong type")) + } } - fn ssh_options<'a>(&self, command: &'a mut process::Command) -> &'a mut process::Command { - command - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .args(["-o", "ControlMaster=no", "-o"]) - .arg(format!("ControlPath={}", self.socket_path.display())) + pub fn send(&self, payload: T) -> Result<()> { + log::debug!("ssh send name:{}", T::NAME); + self.send_dynamic(payload.into_envelope(0, None, None)) } - fn ssh_args(&self) -> Vec { - vec![ - "-o".to_string(), - "ControlMaster=no".to_string(), - "-o".to_string(), - format!("ControlPath={}", self.socket_path.display()), - self.connection_options.ssh_url(), - ] + pub fn request_dynamic( + &self, + mut envelope: proto::Envelope, + type_name: &'static str, + ) -> impl 'static + Future> { + envelope.id = self.next_message_id.fetch_add(1, SeqCst); + let (tx, rx) = oneshot::channel(); + let mut response_channels_lock = self.response_channels.lock(); + response_channels_lock.insert(MessageId(envelope.id), tx); + drop(response_channels_lock); + let result = self.outgoing_tx.unbounded_send(envelope); + async move { + if let Err(error) = &result { + log::error!("failed to send message: {}", error); + return Err(anyhow!("failed to send message: {}", error)); + } + + let response = rx.await.context("connection lost")?.0; + if let Some(proto::envelope::Payload::Error(error)) = &response.payload { + return Err(RpcError::from_proto(error, type_name)); + } + Ok(response) + } + } + + pub fn send_dynamic(&self, mut envelope: proto::Envelope) -> Result<()> { + envelope.id = self.next_message_id.fetch_add(1, SeqCst); + self.outgoing_tx.unbounded_send(envelope)?; + Ok(()) } } -async fn run_cmd(command: &mut process::Command) -> Result { - let output = command.output().await?; - if output.status.success() { - Ok(String::from_utf8_lossy(&output.stdout).to_string()) - } else { - 
Err(anyhow!( - "failed to run command: {}", - String::from_utf8_lossy(&output.stderr) - )) +impl ProtoClient for ChannelClient { + fn request( + &self, + envelope: proto::Envelope, + request_type: &'static str, + ) -> BoxFuture<'static, Result> { + self.request_dynamic(envelope, request_type).boxed() + } + + fn send(&self, envelope: proto::Envelope, _message_type: &'static str) -> Result<()> { + self.send_dynamic(envelope) + } + + fn send_response(&self, envelope: Envelope, _message_type: &'static str) -> anyhow::Result<()> { + self.send_dynamic(envelope) + } + + fn message_handler_set(&self) -> &Mutex { + &self.message_handlers + } + + fn is_via_collab(&self) -> bool { + false } } diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 4b13938d8ca2f5..39540b04e04cb8 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -10,7 +10,7 @@ use project::{ worktree_store::WorktreeStore, LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId, }; -use remote::SshSession; +use remote::ssh_session::ChannelClient; use rpc::{ proto::{self, SSH_PEER_ID, SSH_PROJECT_ID}, AnyProtoClient, TypedEnvelope, @@ -41,7 +41,7 @@ impl HeadlessProject { project::Project::init_settings(cx); } - pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { + pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); let node_runtime = NodeRuntime::unavailable(); diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 908a0a89b6273b..73b8a91da1876f 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -6,7 +6,6 @@ use gpui::Context as _; use remote::{ json_log::LogRecord, protocol::{read_message, write_message}, - SshSession, }; use remote_server::HeadlessProject; use smol::{io::AsyncWriteExt, stream::StreamExt as _, 
Async}; @@ -24,6 +23,8 @@ fn main() { #[cfg(not(windows))] fn main() { + use remote::ssh_session::ChannelClient; + env_logger::builder() .format(|buf, record| { serde_json::to_writer(&mut *buf, &LogRecord::new(record))?; @@ -55,7 +56,7 @@ fn main() { let mut stdin = Async::new(io::stdin()).unwrap(); let mut stdout = Async::new(io::stdout()).unwrap(); - let session = SshSession::server(incoming_rx, outgoing_tx, cx); + let session = ChannelClient::new(incoming_rx, outgoing_tx, cx); let project = cx.new_model(|cx| { HeadlessProject::new( session.clone(), diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 892063942754c1..960b7c248c0e26 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -15,7 +15,7 @@ use project::{ search::{SearchQuery, SearchResult}, Project, ProjectPath, }; -use remote::SshSession; +use remote::SshRemoteClient; use serde_json::json; use settings::{Settings, SettingsLocation, SettingsStore}; use smol::stream::StreamExt; @@ -616,7 +616,7 @@ async fn init_test( cx: &mut TestAppContext, server_cx: &mut TestAppContext, ) -> (Model, Model, Arc) { - let (client_ssh, server_ssh) = SshSession::fake(cx, server_cx); + let (ssh_remote_client, ssh_server_client) = SshRemoteClient::fake(cx, server_cx); init_logger(); let fs = FakeFs::new(server_cx.executor()); @@ -642,8 +642,9 @@ async fn init_test( ); server_cx.update(HeadlessProject::init); - let headless = server_cx.new_model(|cx| HeadlessProject::new(server_ssh, fs.clone(), cx)); - let project = build_project(client_ssh, cx); + let headless = + server_cx.new_model(|cx| HeadlessProject::new(ssh_server_client, fs.clone(), cx)); + let project = build_project(ssh_remote_client, cx); project .update(cx, { @@ -654,7 +655,7 @@ async fn init_test( (project, headless, fs) } -fn build_project(ssh: Arc, cx: &mut TestAppContext) -> Model { +fn build_project(ssh: Arc, cx: &mut 
TestAppContext) -> Model { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index cec913851f04df..b668a5802c3b14 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -61,7 +61,7 @@ use postage::stream::Stream; use project::{ DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, }; -use remote::{SshConnectionOptions, SshSession}; +use remote::{SshConnectionOptions, SshRemoteClient}; use serde::Deserialize; use session::AppSession; use settings::{InvalidSettingsError, Settings}; @@ -5514,7 +5514,7 @@ pub fn join_hosted_project( pub fn open_ssh_project( window: WindowHandle, connection_options: SshConnectionOptions, - session: Arc, + session: Arc, app_state: Arc, paths: Vec, cx: &mut AppContext, From 6336248c1a90d23f08678bf6d30fd84bc1638625 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Tue, 1 Oct 2024 18:58:40 +0800 Subject: [PATCH 421/762] windows: Revert "Fix `hide`, `activate` method on Windows to hide/show application" (#18571) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR reverts the changes introduced via #18164. As shown in the video below, once you `hide` the app, there is essentially no way to bring it back. I must emphasize that the window logic on Windows is entirely different from macOS. On macOS, when you `hide` an app, its icon always remains visible in the dock, and you can always bring the hidden app back by clicking that icon. However, on Windows, there is no such mechanism—the app is literally hidden. I think the `hide` feature should be macOS-only. 
https://github.com/user-attachments/assets/65c8a007-eedb-4444-9499-787b50f2d1e9 Release Notes: - N/A --- crates/gpui/src/platform/windows/platform.rs | 24 ++------------------ 1 file changed, 2 insertions(+), 22 deletions(-) diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 7f6677973b2fbd..e90b2c6ef1bedc 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -33,8 +33,6 @@ use crate::*; pub(crate) struct WindowsPlatform { state: RefCell, raw_window_handles: RwLock>, - // The window handles that are hided by `hide` method. - hidden_windows: RwLock>, // The below members will never change throughout the entire lifecycle of the app. icon: HICON, main_receiver: flume::Receiver, @@ -102,7 +100,6 @@ impl WindowsPlatform { Self { state, raw_window_handles, - hidden_windows: RwLock::new(SmallVec::new()), icon, main_receiver, dispatch_event, @@ -298,26 +295,9 @@ impl Platform for WindowsPlatform { } } - fn activate(&self, _ignoring_other_apps: bool) { - let mut state = self.hidden_windows.write(); - state.iter().for_each(|handle| unsafe { - ShowWindow(*handle, SW_SHOW).ok().log_err(); - }); - state.clear(); - } + fn activate(&self, _ignoring_other_apps: bool) {} - fn hide(&self) { - let mut state = self.hidden_windows.write(); - self.raw_window_handles - .read() - .iter() - .for_each(|handle| unsafe { - if IsWindowVisible(*handle).as_bool() { - state.push(*handle); - ShowWindow(*handle, SW_HIDE).ok().log_err(); - } - }); - } + fn hide(&self) {} // todo(windows) fn hide_other_apps(&self) { From 1be24f77396aaade46739218e70cb059491aaedf Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 1 Oct 2024 09:31:03 -0400 Subject: [PATCH 422/762] Rename proto language to Proto (#18559) All the other languages are capitalized. Proto should be too. 
--- crates/languages/src/proto/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/proto/config.toml b/crates/languages/src/proto/config.toml index b8bccfd39b6a39..6d25c23da5dfaa 100644 --- a/crates/languages/src/proto/config.toml +++ b/crates/languages/src/proto/config.toml @@ -1,4 +1,4 @@ -name = "proto" +name = "Proto" grammar = "proto" path_suffixes = ["proto"] line_comments = ["// "] From 68d6177d370defa04be8f989240ab9e8a8e8c79f Mon Sep 17 00:00:00 2001 From: pantheraleo-7 <159872817+pantheraleo-7@users.noreply.github.com> Date: Tue, 1 Oct 2024 20:39:34 +0530 Subject: [PATCH 423/762] docs: Correct typo in `configuring-zed.md` (#18580) Release Notes: - N/A --- docs/src/configuring-zed.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index ad6a628ed0880f..1e531f7c744d18 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1736,7 +1736,7 @@ See Buffer Font Features ## Terminal: Detect Virtual Environments {#terminal-detect_venv} -- Description: Activate the [Python Virtual Environment](https://docs.python.org/3/library/venv.html), if one is found, in the terminal's working directory (as resolved by the working_directory and automatically activating the virtual environemtn +- Description: Activate the [Python Virtual Environment](https://docs.python.org/3/library/venv.html), if one is found, in the terminal's working directory (as resolved by the working_directory and automatically activating the virtual environment. 
- Setting: `detect_venv` - Default: From 051627c4493b5f5446cb3957576be89899b1d386 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 1 Oct 2024 18:32:16 +0300 Subject: [PATCH 424/762] Project panel horizontal scrollbar (#18513) image https://github.com/user-attachments/assets/734f1f52-70d9-4308-b1fc-36c7cfd4dd76 Closes https://github.com/zed-industries/zed/issues/7001 Closes https://github.com/zed-industries/zed/issues/4427 Part of https://github.com/zed-industries/zed/issues/15324 Part of https://github.com/zed-industries/zed/issues/14551 * Adjusts a `UniformList` to have a horizontal sizing behavior: the old mode forced all items to have the size of the list exactly. A new mode (with corresponding `ListItems` having `overflow_x` enabled) lays out the uniform list elements with width of its widest element, setting the same width to the list itself too. * Using the new behavior, adds a new scrollbar into the project panel and enhances its file name editor to scroll it during editing of long file names * Also restyles the scrollbar a bit, making it narrower and removing its background * Changes the project_panel.scrollbar.show settings to accept `null` and be `null` by default, to inherit `editor`'s scrollbar settings. All editor scrollbar settings are supported now. 
Release Notes: - Added a horizontal scrollbar to project panel ([#7001](https://github.com/zed-industries/zed/issues/7001)) ([#4427](https://github.com/zed-industries/zed/issues/4427)) --------- Co-authored-by: Piotr Osiewicz --- assets/settings/default.json | 14 +- crates/editor/src/editor.rs | 2 +- crates/gpui/src/elements/div.rs | 19 +- crates/gpui/src/elements/list.rs | 10 + crates/gpui/src/elements/uniform_list.rs | 96 ++++- crates/gpui/src/style.rs | 3 + crates/language/src/language_settings.rs | 2 +- crates/project_panel/src/project_panel.rs | 335 +++++++++++++++--- .../src/project_panel_settings.rs | 22 +- crates/project_panel/src/scrollbar.rs | 192 +++++++--- crates/ui/src/components/list/list_item.rs | 15 +- docs/src/configuring-zed.md | 6 +- 12 files changed, 567 insertions(+), 149 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index f6c498e0278e6a..133ff9451d027b 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -356,9 +356,19 @@ /// Scrollbar-related settings "scrollbar": { /// When to show the scrollbar in the project panel. + /// This setting can take four values: /// - /// Default: always - "show": "always" + /// 1. null (default): Inherit editor settings + /// 2. Show the scrollbar if there's important information or + /// follow the system's configured behavior (default): + /// "auto" + /// 3. Match the system's configured behavior: + /// "system" + /// 4. Always show the scrollbar: + /// "always" + /// 5. 
Never show the scrollbar: + /// "never" + "show": null } }, "outline_panel": { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d1ca70f705ce56..61a47d7f631fd8 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -61,7 +61,7 @@ use debounced_delay::DebouncedDelay; use display_map::*; pub use display_map::{DisplayPoint, FoldPlaceholder}; pub use editor_settings::{ - CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, + CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine, SearchSettings, ShowScrollbar, }; pub use editor_settings_controls::*; use element::LineWithInvisibles; diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 4e92f7f82c4cb4..284e574627631f 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -2057,6 +2057,7 @@ impl Interactivity { fn paint_scroll_listener(&self, hitbox: &Hitbox, style: &Style, cx: &mut WindowContext) { if let Some(scroll_offset) = self.scroll_offset.clone() { let overflow = style.overflow; + let allow_concurrent_scroll = style.allow_concurrent_scroll; let line_height = cx.line_height(); let hitbox = hitbox.clone(); cx.on_mouse_event(move |event: &ScrollWheelEvent, phase, cx| { @@ -2065,27 +2066,31 @@ impl Interactivity { let old_scroll_offset = *scroll_offset; let delta = event.delta.pixel_delta(line_height); + let mut delta_x = Pixels::ZERO; if overflow.x == Overflow::Scroll { - let mut delta_x = Pixels::ZERO; if !delta.x.is_zero() { delta_x = delta.x; } else if overflow.y != Overflow::Scroll { delta_x = delta.y; } - - scroll_offset.x += delta_x; } - + let mut delta_y = Pixels::ZERO; if overflow.y == Overflow::Scroll { - let mut delta_y = Pixels::ZERO; if !delta.y.is_zero() { delta_y = delta.y; } else if overflow.x != Overflow::Scroll { delta_y = delta.x; } - - scroll_offset.y += delta_y; } + if !allow_concurrent_scroll && !delta_x.is_zero() && !delta_y.is_zero() { + if delta_x.abs() > 
delta_y.abs() { + delta_y = Pixels::ZERO; + } else { + delta_x = Pixels::ZERO; + } + } + scroll_offset.y += delta_y; + scroll_offset.x += delta_x; cx.stop_propagation(); if *scroll_offset != old_scroll_offset { diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index 6ac6d2a9bf8d7d..d77c91e6552d31 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -89,6 +89,16 @@ pub enum ListSizingBehavior { Auto, } +/// The horizontal sizing behavior to apply during layout. +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum ListHorizontalSizingBehavior { + /// List items' width can never exceed the width of the list. + #[default] + FitList, + /// List items' width may go over the width of the list, if any item is wider. + Unconstrained, +} + struct LayoutItemsResponse { max_item_width: Pixels, scroll_top: ListOffset, diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 4dc2f5335deddd..54297d1214f3f9 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -5,8 +5,8 @@ //! elements with uniform height. use crate::{ - point, px, size, AnyElement, AvailableSpace, Bounds, ContentMask, Element, ElementId, - GlobalElementId, Hitbox, InteractiveElement, Interactivity, IntoElement, LayoutId, + point, size, AnyElement, AvailableSpace, Bounds, ContentMask, Element, ElementId, + GlobalElementId, Hitbox, InteractiveElement, Interactivity, IntoElement, IsZero, LayoutId, ListSizingBehavior, Pixels, Render, ScrollHandle, Size, StyleRefinement, Styled, View, ViewContext, WindowContext, }; @@ -14,6 +14,8 @@ use smallvec::SmallVec; use std::{cell::RefCell, cmp, ops::Range, rc::Rc}; use taffy::style::Overflow; +use super::ListHorizontalSizingBehavior; + /// uniform_list provides lazy rendering for a set of items that are of uniform height. 
/// When rendered into a container with overflow-y: hidden and a fixed (or max) height, /// uniform_list will only render the visible subset of items. @@ -57,6 +59,7 @@ where }, scroll_handle: None, sizing_behavior: ListSizingBehavior::default(), + horizontal_sizing_behavior: ListHorizontalSizingBehavior::default(), } } @@ -69,11 +72,11 @@ pub struct UniformList { interactivity: Interactivity, scroll_handle: Option, sizing_behavior: ListSizingBehavior, + horizontal_sizing_behavior: ListHorizontalSizingBehavior, } /// Frame state used by the [UniformList]. pub struct UniformListFrameState { - item_size: Size, items: SmallVec<[AnyElement; 32]>, } @@ -87,7 +90,18 @@ pub struct UniformListScrollHandle(pub Rc>); pub struct UniformListScrollState { pub base_handle: ScrollHandle, pub deferred_scroll_to_item: Option, - pub last_item_height: Option, + /// Size of the item, captured during last layout. + pub last_item_size: Option, +} + +#[derive(Copy, Clone, Debug, Default)] +/// The size of the item and its contents. +pub struct ItemSize { + /// The size of the item. + pub item: Size, + /// The size of the item's contents, which may be larger than the item itself, + /// if the item was bounded by a parent element. 
+ pub contents: Size, } impl UniformListScrollHandle { @@ -96,7 +110,7 @@ impl UniformListScrollHandle { Self(Rc::new(RefCell::new(UniformListScrollState { base_handle: ScrollHandle::new(), deferred_scroll_to_item: None, - last_item_height: None, + last_item_size: None, }))) } @@ -170,7 +184,6 @@ impl Element for UniformList { ( layout_id, UniformListFrameState { - item_size, items: SmallVec::new(), }, ) @@ -193,17 +206,30 @@ impl Element for UniformList { - point(border.right + padding.right, border.bottom + padding.bottom), ); + let can_scroll_horizontally = matches!( + self.horizontal_sizing_behavior, + ListHorizontalSizingBehavior::Unconstrained + ); + + let longest_item_size = self.measure_item(None, cx); + let content_width = if can_scroll_horizontally { + padded_bounds.size.width.max(longest_item_size.width) + } else { + padded_bounds.size.width + }; let content_size = Size { - width: padded_bounds.size.width, - height: frame_state.item_size.height * self.item_count + padding.top + padding.bottom, + width: content_width, + height: longest_item_size.height * self.item_count + padding.top + padding.bottom, }; let shared_scroll_offset = self.interactivity.scroll_offset.clone().unwrap(); - - let item_height = self.measure_item(Some(padded_bounds.size.width), cx).height; + let item_height = longest_item_size.height; let shared_scroll_to_item = self.scroll_handle.as_mut().and_then(|handle| { let mut handle = handle.0.borrow_mut(); - handle.last_item_height = Some(item_height); + handle.last_item_size = Some(ItemSize { + item: padded_bounds.size, + contents: content_size, + }); handle.deferred_scroll_to_item.take() }); @@ -228,12 +254,19 @@ impl Element for UniformList { if self.item_count > 0 { let content_height = item_height * self.item_count + padding.top + padding.bottom; - let min_scroll_offset = padded_bounds.size.height - content_height; - let is_scrolled = scroll_offset.y != px(0.); + let is_scrolled_vertically = !scroll_offset.y.is_zero(); + let 
min_vertical_scroll_offset = padded_bounds.size.height - content_height; + if is_scrolled_vertically && scroll_offset.y < min_vertical_scroll_offset { + shared_scroll_offset.borrow_mut().y = min_vertical_scroll_offset; + scroll_offset.y = min_vertical_scroll_offset; + } - if is_scrolled && scroll_offset.y < min_scroll_offset { - shared_scroll_offset.borrow_mut().y = min_scroll_offset; - scroll_offset.y = min_scroll_offset; + let content_width = content_size.width + padding.left + padding.right; + let is_scrolled_horizontally = + can_scroll_horizontally && !scroll_offset.x.is_zero(); + if is_scrolled_horizontally && content_width <= padded_bounds.size.width { + shared_scroll_offset.borrow_mut().x = Pixels::ZERO; + scroll_offset.x = Pixels::ZERO; } if let Some(ix) = shared_scroll_to_item { @@ -263,9 +296,17 @@ impl Element for UniformList { cx.with_content_mask(Some(content_mask), |cx| { for (mut item, ix) in items.into_iter().zip(visible_range) { let item_origin = padded_bounds.origin - + point(px(0.), item_height * ix + scroll_offset.y + padding.top); + + point( + scroll_offset.x + padding.left, + item_height * ix + scroll_offset.y + padding.top, + ); + let available_width = if can_scroll_horizontally { + padded_bounds.size.width + scroll_offset.x.abs() + } else { + padded_bounds.size.width + }; let available_space = size( - AvailableSpace::Definite(padded_bounds.size.width), + AvailableSpace::Definite(available_width), AvailableSpace::Definite(item_height), ); item.layout_as_root(available_space, cx); @@ -318,6 +359,25 @@ impl UniformList { self } + /// Sets the horizontal sizing behavior, controlling the way list items laid out horizontally. + /// With [`ListHorizontalSizingBehavior::Unconstrained`] behavior, every item and the list itself will + /// have the size of the widest item and lay out pushing the `end_slot` to the right end. 
+ pub fn with_horizontal_sizing_behavior( + mut self, + behavior: ListHorizontalSizingBehavior, + ) -> Self { + self.horizontal_sizing_behavior = behavior; + match behavior { + ListHorizontalSizingBehavior::FitList => { + self.interactivity.base_style.overflow.x = None; + } + ListHorizontalSizingBehavior::Unconstrained => { + self.interactivity.base_style.overflow.x = Some(Overflow::Scroll); + } + } + self + } + fn measure_item(&self, list_width: Option, cx: &mut WindowContext) -> Size { if self.item_count == 0 { return Size::default(); diff --git a/crates/gpui/src/style.rs b/crates/gpui/src/style.rs index c3148fcfa8b4bd..455a2e162d563e 100644 --- a/crates/gpui/src/style.rs +++ b/crates/gpui/src/style.rs @@ -156,6 +156,8 @@ pub struct Style { pub overflow: Point, /// How much space (in points) should be reserved for the scrollbars of `Overflow::Scroll` and `Overflow::Auto` nodes. pub scrollbar_width: f32, + /// Whether both x and y axis should be scrollable at the same time. + pub allow_concurrent_scroll: bool, // Position properties /// What should the `position` value of this struct use as a base offset? @@ -667,6 +669,7 @@ impl Default for Style { x: Overflow::Visible, y: Overflow::Visible, }, + allow_concurrent_scroll: false, scrollbar_width: 0.0, position: Position::Relative, inset: Edges::auto(), diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index d610ab09865ce3..de37e52290bf46 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -381,7 +381,7 @@ pub struct FeaturesContent { pub enum SoftWrap { /// Prefer a single line generally, unless an overly long line is encountered. None, - /// Deprecated: use None instead. Left to avoid breakin existing users' configs. + /// Deprecated: use None instead. Left to avoid breaking existing users' configs. /// Prefer a single line generally, unless an overly long line is encountered. 
PreferLine, /// Soft wrap lines that exceed the editor width. diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 6958bfb3318e23..53b274ee6fa964 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -8,20 +8,22 @@ use db::kvp::KEY_VALUE_STORE; use editor::{ items::entry_git_aware_label_color, scroll::{Autoscroll, ScrollbarAutoHide}, - Editor, + Editor, EditorEvent, EditorSettings, ShowScrollbar, }; use file_icons::FileIcons; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, Context as _, Result}; use collections::{hash_map, BTreeSet, HashMap}; +use core::f32; use git::repository::GitFileStatus; use gpui::{ actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, DragMoveEvent, - EventEmitter, ExternalPaths, FocusHandle, FocusableView, InteractiveElement, KeyContext, - ListSizingBehavior, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, - PromptLevel, Render, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, - ViewContext, VisualContext as _, WeakView, WindowContext, + Entity, EventEmitter, ExternalPaths, FocusHandle, FocusableView, InteractiveElement, + KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior, Model, MouseButton, + MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, Render, Stateful, Styled, + Subscription, Task, UniformListScrollHandle, View, ViewContext, VisualContext as _, WeakView, + WindowContext, }; use indexmap::IndexMap; use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrev}; @@ -29,7 +31,7 @@ use project::{ relativize_path, Entry, EntryKind, Fs, Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId, }; -use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings, ShowScrollbar}; +use project_panel_settings::{ProjectPanelDockPosition, 
ProjectPanelSettings}; use serde::{Deserialize, Serialize}; use std::{ cell::{Cell, OnceCell}, @@ -80,8 +82,10 @@ pub struct ProjectPanel { width: Option, pending_serialization: Task>, show_scrollbar: bool, - scrollbar_drag_thumb_offset: Rc>>, + vertical_scrollbar_drag_thumb_offset: Rc>>, + horizontal_scrollbar_drag_thumb_offset: Rc>>, hide_scrollbar_task: Option>, + max_width_item_index: Option, } #[derive(Clone, Debug)] @@ -90,6 +94,8 @@ struct EditState { entry_id: ProjectEntryId, is_new_entry: bool, is_dir: bool, + is_symlink: bool, + depth: usize, processing_filename: Option, } @@ -254,23 +260,26 @@ impl ProjectPanel { let filename_editor = cx.new_view(Editor::single_line); - cx.subscribe(&filename_editor, |this, _, event, cx| match event { - editor::EditorEvent::BufferEdited - | editor::EditorEvent::SelectionsChanged { .. } => { - this.autoscroll(cx); - } - editor::EditorEvent::Blurred => { - if this - .edit_state - .as_ref() - .map_or(false, |state| state.processing_filename.is_none()) - { - this.edit_state = None; - this.update_visible_entries(None, cx); + cx.subscribe( + &filename_editor, + |project_panel, _, editor_event, cx| match editor_event { + EditorEvent::BufferEdited | EditorEvent::SelectionsChanged { .. 
} => { + project_panel.autoscroll(cx); } - } - _ => {} - }) + EditorEvent::Blurred => { + if project_panel + .edit_state + .as_ref() + .map_or(false, |state| state.processing_filename.is_none()) + { + project_panel.edit_state = None; + project_panel.update_visible_entries(None, cx); + cx.notify(); + } + } + _ => {} + }, + ) .detach(); cx.observe_global::(|_, cx| { @@ -311,7 +320,9 @@ impl ProjectPanel { pending_serialization: Task::ready(None), show_scrollbar: !Self::should_autohide_scrollbar(cx), hide_scrollbar_task: None, - scrollbar_drag_thumb_offset: Default::default(), + vertical_scrollbar_drag_thumb_offset: Default::default(), + horizontal_scrollbar_drag_thumb_offset: Default::default(), + max_width_item_index: None, }; this.update_visible_entries(None, cx); @@ -827,7 +838,7 @@ impl ProjectPanel { Some(cx.spawn(|project_panel, mut cx| async move { let new_entry = edit_task.await; project_panel.update(&mut cx, |project_panel, cx| { - project_panel.edit_state.take(); + project_panel.edit_state = None; cx.notify(); })?; @@ -970,6 +981,8 @@ impl ProjectPanel { is_new_entry: true, is_dir, processing_filename: None, + is_symlink: false, + depth: 0, }); self.filename_editor.update(cx, |editor, cx| { editor.clear(cx); @@ -992,6 +1005,7 @@ impl ProjectPanel { leaf_entry_id } } + fn rename(&mut self, _: &Rename, cx: &mut ViewContext) { if let Some(SelectedEntry { worktree_id, @@ -1007,6 +1021,8 @@ impl ProjectPanel { is_new_entry: false, is_dir: entry.is_dir(), processing_filename: None, + is_symlink: entry.is_symlink, + depth: 0, }); let file_name = entry .path @@ -1750,6 +1766,7 @@ impl ProjectPanel { let old_ancestors = std::mem::take(&mut self.ancestors); self.visible_entries.clear(); + let mut max_width_item = None; for worktree in project.visible_worktrees(cx) { let snapshot = worktree.read(cx).snapshot(); let worktree_id = snapshot.id(); @@ -1805,6 +1822,12 @@ impl ProjectPanel { .get(&entry.id) .map(|ancestor| ancestor.current_ancestor_depth) 
.unwrap_or_default(); + if let Some(edit_state) = &mut self.edit_state { + if edit_state.entry_id == entry.id { + edit_state.is_symlink = entry.is_symlink; + edit_state.depth = depth; + } + } let mut ancestors = std::mem::take(&mut auto_folded_ancestors); if ancestors.len() > 1 { ancestors.reverse(); @@ -1837,6 +1860,78 @@ impl ProjectPanel { is_fifo: entry.is_fifo, }); } + let worktree_abs_path = worktree.read(cx).abs_path(); + let (depth, path) = if Some(entry) == worktree.read(cx).root_entry() { + let Some(path_name) = worktree_abs_path + .file_name() + .with_context(|| { + format!("Worktree abs path has no file name, root entry: {entry:?}") + }) + .log_err() + else { + continue; + }; + let path = Arc::from(Path::new(path_name)); + let depth = 0; + (depth, path) + } else if entry.is_file() { + let Some(path_name) = entry + .path + .file_name() + .with_context(|| format!("Non-root entry has no file name: {entry:?}")) + .log_err() + else { + continue; + }; + let path = Arc::from(Path::new(path_name)); + let depth = entry.path.ancestors().count() - 1; + (depth, path) + } else { + let path = self + .ancestors + .get(&entry.id) + .and_then(|ancestors| { + let outermost_ancestor = ancestors.ancestors.last()?; + let root_folded_entry = worktree + .read(cx) + .entry_for_id(*outermost_ancestor)? 
+ .path + .as_ref(); + entry + .path + .strip_prefix(root_folded_entry) + .ok() + .and_then(|suffix| { + let full_path = Path::new(root_folded_entry.file_name()?); + Some(Arc::::from(full_path.join(suffix))) + }) + }) + .unwrap_or_else(|| entry.path.clone()); + let depth = path + .strip_prefix(worktree_abs_path) + .map(|suffix| suffix.components().count()) + .unwrap_or_default(); + (depth, path) + }; + let width_estimate = item_width_estimate( + depth, + path.to_string_lossy().chars().count(), + entry.is_symlink, + ); + + match max_width_item.as_mut() { + Some((id, worktree_id, width)) => { + if *width < width_estimate { + *id = entry.id; + *worktree_id = worktree.read(cx).id(); + *width = width_estimate; + } + } + None => { + max_width_item = Some((entry.id, worktree.read(cx).id(), width_estimate)) + } + } + if expanded_dir_ids.binary_search(&entry.id).is_err() && entry_iter.advance_to_sibling() { @@ -1851,6 +1946,22 @@ impl ProjectPanel { .push((worktree_id, visible_worktree_entries, OnceCell::new())); } + if let Some((project_entry_id, worktree_id, _)) = max_width_item { + let mut visited_worktrees_length = 0; + let index = self.visible_entries.iter().find_map(|(id, entries, _)| { + if worktree_id == *id { + entries + .iter() + .position(|entry| entry.id == project_entry_id) + } else { + visited_worktrees_length += entries.len(); + None + } + }); + if let Some(index) = index { + self.max_width_item_index = Some(visited_worktrees_length + index); + } + } if let Some((worktree_id, entry_id)) = new_selected_entry { self.selection = Some(SelectedEntry { worktree_id, @@ -2474,7 +2585,8 @@ impl ProjectPanel { cx.stop_propagation(); this.deploy_context_menu(event.position, entry_id, cx); }, - )), + )) + .overflow_x(), ) .border_1() .border_r_2() @@ -2498,22 +2610,19 @@ impl ProjectPanel { ) } - fn render_scrollbar( - &self, - items_count: usize, - cx: &mut ViewContext, - ) -> Option> { - let settings = ProjectPanelSettings::get_global(cx); - if settings.scrollbar.show 
== ShowScrollbar::Never { + fn render_vertical_scrollbar(&self, cx: &mut ViewContext) -> Option> { + if !Self::should_show_scrollbar(cx) { return None; } let scroll_handle = self.scroll_handle.0.borrow(); - - let height = scroll_handle - .last_item_height - .filter(|_| self.show_scrollbar || self.scrollbar_drag_thumb_offset.get().is_some())?; - - let total_list_length = height.0 as f64 * items_count as f64; + let total_list_length = scroll_handle + .last_item_size + .filter(|_| { + self.show_scrollbar || self.vertical_scrollbar_drag_thumb_offset.get().is_some() + })? + .contents + .height + .0 as f64; let current_offset = scroll_handle.base_handle.offset().y.0.min(0.).abs() as f64; let mut percentage = current_offset / total_list_length; let end_offset = (current_offset + scroll_handle.base_handle.bounds().size.height.0 as f64) @@ -2536,7 +2645,7 @@ impl ProjectPanel { Some( div() .occlude() - .id("project-panel-scroll") + .id("project-panel-vertical-scroll") .on_mouse_move(cx.listener(|_, _, cx| { cx.notify(); cx.stop_propagation() @@ -2550,7 +2659,7 @@ impl ProjectPanel { .on_mouse_up( MouseButton::Left, cx.listener(|this, _, cx| { - if this.scrollbar_drag_thumb_offset.get().is_none() + if this.vertical_scrollbar_drag_thumb_offset.get().is_none() && !this.focus_handle.contains_focused(cx) { this.hide_scrollbar(cx); @@ -2565,21 +2674,101 @@ impl ProjectPanel { })) .h_full() .absolute() - .right_0() - .top_0() - .bottom_0() + .right_1() + .top_1() + .bottom_1() .w(px(12.)) .cursor_default() - .child(ProjectPanelScrollbar::new( + .child(ProjectPanelScrollbar::vertical( percentage as f32..end_offset as f32, self.scroll_handle.clone(), - self.scrollbar_drag_thumb_offset.clone(), - cx.view().clone().into(), - items_count, + self.vertical_scrollbar_drag_thumb_offset.clone(), + cx.view().entity_id(), )), ) } + fn render_horizontal_scrollbar(&self, cx: &mut ViewContext) -> Option> { + if !Self::should_show_scrollbar(cx) { + return None; + } + let scroll_handle = 
self.scroll_handle.0.borrow(); + let longest_item_width = scroll_handle + .last_item_size + .filter(|_| { + self.show_scrollbar || self.horizontal_scrollbar_drag_thumb_offset.get().is_some() + }) + .filter(|size| size.contents.width > size.item.width)? + .contents + .width + .0 as f64; + let current_offset = scroll_handle.base_handle.offset().x.0.min(0.).abs() as f64; + let mut percentage = current_offset / longest_item_width; + let end_offset = (current_offset + scroll_handle.base_handle.bounds().size.width.0 as f64) + / longest_item_width; + // Uniform scroll handle might briefly report an offset greater than the length of a list; + // in such case we'll adjust the starting offset as well to keep the scrollbar thumb length stable. + let overshoot = (end_offset - 1.).clamp(0., 1.); + if overshoot > 0. { + percentage -= overshoot; + } + const MINIMUM_SCROLLBAR_PERCENTAGE_WIDTH: f64 = 0.005; + if percentage + MINIMUM_SCROLLBAR_PERCENTAGE_WIDTH > 1.0 || end_offset > longest_item_width + { + return None; + } + if longest_item_width < scroll_handle.base_handle.bounds().size.width.0 as f64 { + return None; + } + let end_offset = end_offset.clamp(percentage + MINIMUM_SCROLLBAR_PERCENTAGE_WIDTH, 1.); + Some( + div() + .occlude() + .id("project-panel-horizontal-scroll") + .on_mouse_move(cx.listener(|_, _, cx| { + cx.notify(); + cx.stop_propagation() + })) + .on_hover(|_, cx| { + cx.stop_propagation(); + }) + .on_any_mouse_down(|_, cx| { + cx.stop_propagation(); + }) + .on_mouse_up( + MouseButton::Left, + cx.listener(|this, _, cx| { + if this.horizontal_scrollbar_drag_thumb_offset.get().is_none() + && !this.focus_handle.contains_focused(cx) + { + this.hide_scrollbar(cx); + cx.notify(); + } + + cx.stop_propagation(); + }), + ) + .on_scroll_wheel(cx.listener(|_, _, cx| { + cx.notify(); + })) + .w_full() + .absolute() + .right_1() + .left_1() + .bottom_1() + .h(px(12.)) + .cursor_default() + .when(self.width.is_some(), |this| { + this.child(ProjectPanelScrollbar::horizontal( + 
percentage as f32..end_offset as f32, + self.scroll_handle.clone(), + self.horizontal_scrollbar_drag_thumb_offset.clone(), + cx.view().entity_id(), + )) + }), + ) + } + fn dispatch_context(&self, cx: &ViewContext) -> KeyContext { let mut dispatch_context = KeyContext::new_with_defaults(); dispatch_context.add("ProjectPanel"); @@ -2595,9 +2784,32 @@ impl ProjectPanel { dispatch_context } + fn should_show_scrollbar(cx: &AppContext) -> bool { + let show = ProjectPanelSettings::get_global(cx) + .scrollbar + .show + .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show); + match show { + ShowScrollbar::Auto => true, + ShowScrollbar::System => true, + ShowScrollbar::Always => true, + ShowScrollbar::Never => false, + } + } + fn should_autohide_scrollbar(cx: &AppContext) -> bool { - cx.try_global::() - .map_or_else(|| cx.should_auto_hide_scrollbars(), |autohide| autohide.0) + let show = ProjectPanelSettings::get_global(cx) + .scrollbar + .show + .unwrap_or_else(|| EditorSettings::get_global(cx).scrollbar.show); + match show { + ShowScrollbar::Auto => true, + ShowScrollbar::System => cx + .try_global::() + .map_or_else(|| cx.should_auto_hide_scrollbars(), |autohide| autohide.0), + ShowScrollbar::Always => false, + ShowScrollbar::Never => true, + } } fn hide_scrollbar(&mut self, cx: &mut ViewContext) { @@ -2623,7 +2835,7 @@ impl ProjectPanel { project: Model, entry_id: ProjectEntryId, skip_ignored: bool, - cx: &mut ViewContext<'_, ProjectPanel>, + cx: &mut ViewContext<'_, Self>, ) { if let Some(worktree) = project.read(cx).worktree_for_entry(entry_id, cx) { let worktree = worktree.read(cx); @@ -2645,13 +2857,22 @@ impl ProjectPanel { } } +fn item_width_estimate(depth: usize, item_text_chars: usize, is_symlink: bool) -> usize { + const ICON_SIZE_FACTOR: usize = 2; + let mut item_width = depth * ICON_SIZE_FACTOR + item_text_chars; + if is_symlink { + item_width += ICON_SIZE_FACTOR; + } + item_width +} + impl Render for ProjectPanel { fn render(&mut self, cx: &mut 
gpui::ViewContext) -> impl IntoElement { let has_worktree = !self.visible_entries.is_empty(); let project = self.project.read(cx); if has_worktree { - let items_count = self + let item_count = self .visible_entries .iter() .map(|(_, worktree_entries, _)| worktree_entries.len()) @@ -2742,7 +2963,7 @@ impl Render for ProjectPanel { ) .track_focus(&self.focus_handle) .child( - uniform_list(cx.view().clone(), "entries", items_count, { + uniform_list(cx.view().clone(), "entries", item_count, { |this, range, cx| { let mut items = Vec::with_capacity(range.end - range.start); this.for_each_visible_entry(range, cx, |id, details, cx| { @@ -2753,9 +2974,12 @@ impl Render for ProjectPanel { }) .size_full() .with_sizing_behavior(ListSizingBehavior::Infer) + .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained) + .with_width_from_item(self.max_width_item_index) .track_scroll(self.scroll_handle.clone()), ) - .children(self.render_scrollbar(items_count, cx)) + .children(self.render_vertical_scrollbar(cx)) + .children(self.render_horizontal_scrollbar(cx)) .children(self.context_menu.as_ref().map(|(menu, position, _)| { deferred( anchored() @@ -2934,6 +3158,7 @@ mod tests { use serde_json::json; use settings::SettingsStore; use std::path::{Path, PathBuf}; + use ui::Context; use workspace::{ item::{Item, ProjectItem}, register_project_item, AppState, diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 4d73ae92456da9..0114b3968d4db5 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -1,3 +1,4 @@ +use editor::ShowScrollbar; use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; @@ -24,33 +25,20 @@ pub struct ProjectPanelSettings { pub scrollbar: ScrollbarSettings, } -/// When to show the scrollbar in the project panel. 
-/// -/// Default: always -#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] -#[serde(rename_all = "snake_case")] -pub enum ShowScrollbar { - #[default] - /// Always show the scrollbar. - Always, - /// Never show the scrollbar. - Never, -} - #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettings { /// When to show the scrollbar in the project panel. /// - /// Default: always - pub show: ShowScrollbar, + /// Default: inherits editor scrollbar settings + pub show: Option, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] pub struct ScrollbarSettingsContent { /// When to show the scrollbar in the project panel. /// - /// Default: always - pub show: Option, + /// Default: inherits editor scrollbar settings + pub show: Option>, } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] diff --git a/crates/project_panel/src/scrollbar.rs b/crates/project_panel/src/scrollbar.rs index 0da9db7cb794f0..cb7b15386c2070 100644 --- a/crates/project_panel/src/scrollbar.rs +++ b/crates/project_panel/src/scrollbar.rs @@ -1,34 +1,54 @@ use std::{cell::Cell, ops::Range, rc::Rc}; use gpui::{ - point, AnyView, Bounds, ContentMask, Hitbox, MouseDownEvent, MouseMoveEvent, MouseUpEvent, - ScrollWheelEvent, Style, UniformListScrollHandle, + point, quad, Bounds, ContentMask, Corners, Edges, EntityId, Hitbox, Hsla, MouseDownEvent, + MouseMoveEvent, MouseUpEvent, ScrollWheelEvent, Style, UniformListScrollHandle, }; use ui::{prelude::*, px, relative, IntoElement}; +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum ScrollbarKind { + Horizontal, + Vertical, +} + pub(crate) struct ProjectPanelScrollbar { thumb: Range, scroll: UniformListScrollHandle, // If Some(), there's an active drag, offset by percentage from the top of thumb. 
scrollbar_drag_state: Rc>>, - item_count: usize, - view: AnyView, + kind: ScrollbarKind, + parent_id: EntityId, } impl ProjectPanelScrollbar { - pub(crate) fn new( + pub(crate) fn vertical( thumb: Range, scroll: UniformListScrollHandle, scrollbar_drag_state: Rc>>, - view: AnyView, - item_count: usize, + parent_id: EntityId, ) -> Self { Self { thumb, scroll, scrollbar_drag_state, - item_count, - view, + kind: ScrollbarKind::Vertical, + parent_id, + } + } + + pub(crate) fn horizontal( + thumb: Range, + scroll: UniformListScrollHandle, + scrollbar_drag_state: Rc>>, + parent_id: EntityId, + ) -> Self { + Self { + thumb, + scroll, + scrollbar_drag_state, + kind: ScrollbarKind::Horizontal, + parent_id, } } } @@ -50,8 +70,14 @@ impl gpui::Element for ProjectPanelScrollbar { let mut style = Style::default(); style.flex_grow = 1.; style.flex_shrink = 1.; - style.size.width = px(12.).into(); - style.size.height = relative(1.).into(); + if self.kind == ScrollbarKind::Vertical { + style.size.width = px(12.).into(); + style.size.height = relative(1.).into(); + } else { + style.size.width = relative(1.).into(); + style.size.height = px(12.).into(); + } + (cx.request_layout(style, None), ()) } @@ -77,25 +103,65 @@ impl gpui::Element for ProjectPanelScrollbar { ) { cx.with_content_mask(Some(ContentMask { bounds }), |cx| { let colors = cx.theme().colors(); - let scrollbar_background = colors.scrollbar_track_background; let thumb_background = colors.scrollbar_thumb_background; - cx.paint_quad(gpui::fill(bounds, scrollbar_background)); - - let thumb_offset = self.thumb.start * bounds.size.height; - let thumb_end = self.thumb.end * bounds.size.height; + let is_vertical = self.kind == ScrollbarKind::Vertical; + let extra_padding = px(5.0); + let padded_bounds = if is_vertical { + Bounds::from_corners( + bounds.origin + point(Pixels::ZERO, extra_padding), + bounds.lower_right() - point(Pixels::ZERO, extra_padding * 3), + ) + } else { + Bounds::from_corners( + bounds.origin + 
point(extra_padding, Pixels::ZERO), + bounds.lower_right() - point(extra_padding * 3, Pixels::ZERO), + ) + }; - let thumb_percentage_size = self.thumb.end - self.thumb.start; - let thumb_bounds = { - let thumb_upper_left = point(bounds.origin.x, bounds.origin.y + thumb_offset); + let mut thumb_bounds = if is_vertical { + let thumb_offset = self.thumb.start * padded_bounds.size.height; + let thumb_end = self.thumb.end * padded_bounds.size.height; + let thumb_upper_left = point( + padded_bounds.origin.x, + padded_bounds.origin.y + thumb_offset, + ); let thumb_lower_right = point( - bounds.origin.x + bounds.size.width, - bounds.origin.y + thumb_end, + padded_bounds.origin.x + padded_bounds.size.width, + padded_bounds.origin.y + thumb_end, ); Bounds::from_corners(thumb_upper_left, thumb_lower_right) + } else { + let thumb_offset = self.thumb.start * padded_bounds.size.width; + let thumb_end = self.thumb.end * padded_bounds.size.width; + let thumb_upper_left = point( + padded_bounds.origin.x + thumb_offset, + padded_bounds.origin.y, + ); + let thumb_lower_right = point( + padded_bounds.origin.x + thumb_end, + padded_bounds.origin.y + padded_bounds.size.height, + ); + Bounds::from_corners(thumb_upper_left, thumb_lower_right) + }; + let corners = if is_vertical { + thumb_bounds.size.width /= 1.5; + Corners::all(thumb_bounds.size.width / 2.0) + } else { + thumb_bounds.size.height /= 1.5; + Corners::all(thumb_bounds.size.height / 2.0) }; - cx.paint_quad(gpui::fill(thumb_bounds, thumb_background)); + cx.paint_quad(quad( + thumb_bounds, + corners, + thumb_background, + Edges::default(), + Hsla::transparent_black(), + )); + let scroll = self.scroll.clone(); - let item_count = self.item_count; + let kind = self.kind; + let thumb_percentage_size = self.thumb.end - self.thumb.start; + cx.on_mouse_event({ let scroll = self.scroll.clone(); let is_dragging = self.scrollbar_drag_state.clone(); @@ -103,20 +169,37 @@ impl gpui::Element for ProjectPanelScrollbar { if phase.bubble() && 
bounds.contains(&event.position) { if !thumb_bounds.contains(&event.position) { let scroll = scroll.0.borrow(); - if let Some(last_height) = scroll.last_item_height { - let max_offset = item_count as f32 * last_height; - let percentage = - (event.position.y - bounds.origin.y) / bounds.size.height; - - let percentage = percentage.min(1. - thumb_percentage_size); - scroll - .base_handle - .set_offset(point(px(0.), -max_offset * percentage)); + if let Some(item_size) = scroll.last_item_size { + match kind { + ScrollbarKind::Horizontal => { + let percentage = (event.position.x - bounds.origin.x) + / bounds.size.width; + let max_offset = item_size.contents.width; + let percentage = percentage.min(1. - thumb_percentage_size); + scroll.base_handle.set_offset(point( + -max_offset * percentage, + scroll.base_handle.offset().y, + )); + } + ScrollbarKind::Vertical => { + let percentage = (event.position.y - bounds.origin.y) + / bounds.size.height; + let max_offset = item_size.contents.height; + let percentage = percentage.min(1. 
- thumb_percentage_size); + scroll.base_handle.set_offset(point( + scroll.base_handle.offset().x, + -max_offset * percentage, + )); + } + } } } else { - let thumb_top_offset = - (event.position.y - thumb_bounds.origin.y) / bounds.size.height; - is_dragging.set(Some(thumb_top_offset)); + let thumb_offset = if is_vertical { + (event.position.y - thumb_bounds.origin.y) / bounds.size.height + } else { + (event.position.x - thumb_bounds.origin.x) / bounds.size.width + }; + is_dragging.set(Some(thumb_offset)); } } } @@ -127,6 +210,7 @@ impl gpui::Element for ProjectPanelScrollbar { if phase.bubble() && bounds.contains(&event.position) { let scroll = scroll.0.borrow_mut(); let current_offset = scroll.base_handle.offset(); + scroll .base_handle .set_offset(current_offset + event.delta.pixel_delta(cx.line_height())); @@ -134,19 +218,39 @@ impl gpui::Element for ProjectPanelScrollbar { } }); let drag_state = self.scrollbar_drag_state.clone(); - let view_id = self.view.entity_id(); + let view_id = self.parent_id; + let kind = self.kind; cx.on_mouse_event(move |event: &MouseMoveEvent, _, cx| { if let Some(drag_state) = drag_state.get().filter(|_| event.dragging()) { let scroll = scroll.0.borrow(); - if let Some(last_height) = scroll.last_item_height { - let max_offset = item_count as f32 * last_height; - let percentage = - (event.position.y - bounds.origin.y) / bounds.size.height - drag_state; + if let Some(item_size) = scroll.last_item_size { + match kind { + ScrollbarKind::Horizontal => { + let max_offset = item_size.contents.width; + let percentage = (event.position.x - bounds.origin.x) + / bounds.size.width + - drag_state; + + let percentage = percentage.min(1. 
- thumb_percentage_size); + scroll.base_handle.set_offset(point( + -max_offset * percentage, + scroll.base_handle.offset().y, + )); + } + ScrollbarKind::Vertical => { + let max_offset = item_size.contents.height; + let percentage = (event.position.y - bounds.origin.y) + / bounds.size.height + - drag_state; + + let percentage = percentage.min(1. - thumb_percentage_size); + scroll.base_handle.set_offset(point( + scroll.base_handle.offset().x, + -max_offset * percentage, + )); + } + }; - let percentage = percentage.min(1. - thumb_percentage_size); - scroll - .base_handle - .set_offset(point(px(0.), -max_offset * percentage)); cx.notify(view_id); } } else { diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index e1c90894fdd3d2..e13fb8ef265ebf 100644 --- a/crates/ui/src/components/list/list_item.rs +++ b/crates/ui/src/components/list/list_item.rs @@ -36,6 +36,7 @@ pub struct ListItem { on_secondary_mouse_down: Option>, children: SmallVec<[AnyElement; 2]>, selectable: bool, + overflow_x: bool, } impl ListItem { @@ -58,6 +59,7 @@ impl ListItem { tooltip: None, children: SmallVec::new(), selectable: true, + overflow_x: false, } } @@ -131,6 +133,11 @@ impl ListItem { self.end_hover_slot = end_hover_slot.into().map(IntoElement::into_any_element); self } + + pub fn overflow_x(mut self) -> Self { + self.overflow_x = true; + self + } } impl Disableable for ListItem { @@ -239,7 +246,13 @@ impl RenderOnce for ListItem { .flex_shrink_0() .flex_basis(relative(0.25)) .gap(Spacing::Small.rems(cx)) - .overflow_hidden() + .map(|list_content| { + if self.overflow_x { + list_content + } else { + list_content.overflow_hidden() + } + }) .children(self.start_slot) .children(self.children), ) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 1e531f7c744d18..fbd5fa53cfbd84 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1954,7 +1954,7 @@ Run the `theme selector: toggle` action in the 
command palette to see a current "auto_reveal_entries": true, "auto_fold_dirs": true, "scrollbar": { - "show": "always" + "show": null } } } @@ -2074,13 +2074,13 @@ Run the `theme selector: toggle` action in the command palette to see a current ### Scrollbar -- Description: Scrollbar related settings. Possible values: "always", "never". +- Description: Scrollbar related settings. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. - Setting: `scrollbar` - Default: ```json "scrollbar": { - "show": "always" + "show": null } ``` From 280b8a89ea132f225b249ce743839c0cab96b05b Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 1 Oct 2024 12:40:18 -0400 Subject: [PATCH 425/762] editor: Allow opening excerpts from proposed changes editor (#18591) This PR adds the ability to open excerpts in the base buffer from the proposed changes editor. Release Notes: - N/A --- crates/editor/src/proposed_changes_editor.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index ec0c05d88382c0..a791e87e62053f 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -11,7 +11,7 @@ use text::ToOffset; use ui::prelude::*; use workspace::{ searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation, - ToolbarItemView, + ToolbarItemView, Workspace, }; pub struct ProposedChangesEditor { @@ -159,6 +159,11 @@ impl Item for ProposedChangesEditor { None } } + + fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext) { + self.editor + .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx)); + } } impl ProposedChangesEditorToolbar { From eb962b7bfc15dd77c702cfcbf7dc32585e89749a Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 1 Oct 2024 13:05:50 -0400 Subject: 
[PATCH 426/762] editor: Include proposed changes editor in navigation history (#18593) This PR makes it so the proposed changes editor works with the workspace navigation history. This allows for easily navigating back to the proposed changes editor after opening one of the excerpts into the base buffer. Release Notes: - N/A --- crates/editor/src/proposed_changes_editor.rs | 22 +++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index a791e87e62053f..0666346e487764 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -161,8 +161,28 @@ impl Item for ProposedChangesEditor { } fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext) { + self.editor.update(cx, |editor, cx| { + Item::added_to_workspace(editor, workspace, cx) + }); + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + self.editor.update(cx, Item::deactivated); + } + + fn navigate(&mut self, data: Box, cx: &mut ViewContext) -> bool { self.editor - .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx)); + .update(cx, |editor, cx| Item::navigate(editor, data, cx)) + } + + fn set_nav_history( + &mut self, + nav_history: workspace::ItemNavHistory, + cx: &mut ViewContext, + ) { + self.editor.update(cx, |editor, cx| { + Item::set_nav_history(editor, nav_history, cx) + }); } } From d14e36b3231071d2da7492bc89e12b6f964fee3a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Oct 2024 11:07:52 -0600 Subject: [PATCH 427/762] Add an apply button to hunks in proposed changes editor (#18592) Release Notes: - N/A --------- Co-authored-by: Antonio Co-authored-by: Nathan --- crates/editor/src/actions.rs | 1 + crates/editor/src/editor.rs | 14 + crates/editor/src/element.rs | 1 + crates/editor/src/hunk_diff.rs | 272 +++++++++++-------- crates/editor/src/proposed_changes_editor.rs | 52 ++-- 
crates/language/src/buffer.rs | 55 ++-- crates/language/src/buffer_tests.rs | 16 +- 7 files changed, 248 insertions(+), 163 deletions(-) diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index b5935782580ba3..502b70361b4f8e 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -193,6 +193,7 @@ gpui::actions!( AcceptPartialInlineCompletion, AddSelectionAbove, AddSelectionBelow, + ApplyDiffHunk, Backspace, Cancel, CancelLanguageServerWork, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 61a47d7f631fd8..b43433e3f41e6e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6205,6 +6205,20 @@ impl Editor { } } + fn apply_selected_diff_hunks(&mut self, _: &ApplyDiffHunk, cx: &mut ViewContext) { + let snapshot = self.buffer.read(cx).snapshot(cx); + let hunks = hunks_for_selections(&snapshot, &self.selections.disjoint_anchors()); + self.transact(cx, |editor, cx| { + for hunk in hunks { + if let Some(buffer) = editor.buffer.read(cx).buffer(hunk.buffer_id) { + buffer.update(cx, |buffer, cx| { + buffer.merge_into_base(Some(hunk.buffer_range.to_offset(buffer)), cx); + }); + } + } + }); + } + pub fn open_active_item_in_terminal(&mut self, _: &OpenInTerminal, cx: &mut ViewContext) { if let Some(working_directory) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { let project_path = buffer.read(cx).project_path(cx)?; diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 98a5ff7f4dff50..8a0735354720d1 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -436,6 +436,7 @@ impl EditorElement { register_action(view, cx, Editor::accept_inline_completion); register_action(view, cx, Editor::revert_file); register_action(view, cx, Editor::revert_selected_hunks); + register_action(view, cx, Editor::apply_selected_diff_hunks); register_action(view, cx, Editor::open_active_item_in_terminal) } diff --git a/crates/editor/src/hunk_diff.rs 
b/crates/editor/src/hunk_diff.rs index 2ccd60c668de7f..c8caa30b59c49c 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -14,9 +14,9 @@ use ui::{ use util::RangeExt; use crate::{ - editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, BlockDisposition, - BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, - Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, + editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyDiffHunk, + BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, + DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; @@ -238,19 +238,14 @@ impl Editor { cx: &mut ViewContext<'_, Editor>, ) -> Option<()> { let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let multi_buffer_row_range = hunk - .multi_buffer_range - .start - .to_point(&multi_buffer_snapshot) - ..hunk.multi_buffer_range.end.to_point(&multi_buffer_snapshot); - let hunk_start = hunk.multi_buffer_range.start; - let hunk_end = hunk.multi_buffer_range.end; + let hunk_range = hunk.multi_buffer_range.clone(); + let hunk_point_range = hunk_range.to_point(&multi_buffer_snapshot); let buffer = self.buffer().clone(); let snapshot = self.snapshot(cx); let (diff_base_buffer, deleted_text_lines) = buffer.update(cx, |buffer, cx| { - let hunk = buffer_diff_hunk(&snapshot.buffer_snapshot, multi_buffer_row_range.clone())?; - let mut buffer_ranges = buffer.range_to_buffer_ranges(multi_buffer_row_range, cx); + let hunk = buffer_diff_hunk(&snapshot.buffer_snapshot, hunk_point_range.clone())?; + let mut buffer_ranges = buffer.range_to_buffer_ranges(hunk_point_range, cx); if buffer_ranges.len() == 1 { let (buffer, _, _) = buffer_ranges.pop()?; let diff_base_buffer = diff_base_buffer @@ -275,7 +270,7 @@ impl Editor { probe 
.hunk_range .start - .cmp(&hunk_start, &multi_buffer_snapshot) + .cmp(&hunk_range.start, &multi_buffer_snapshot) }) { Ok(_already_present) => return None, Err(ix) => ix, @@ -295,7 +290,7 @@ impl Editor { } DiffHunkStatus::Added => { self.highlight_rows::( - hunk_start..hunk_end, + hunk_range.clone(), added_hunk_color(cx), false, cx, @@ -304,7 +299,7 @@ impl Editor { } DiffHunkStatus::Modified => { self.highlight_rows::( - hunk_start..hunk_end, + hunk_range.clone(), added_hunk_color(cx), false, cx, @@ -323,7 +318,7 @@ impl Editor { block_insert_index, ExpandedHunk { blocks, - hunk_range: hunk_start..hunk_end, + hunk_range, status: hunk.status, folded: false, diff_base_byte_range: hunk.diff_base_byte_range.clone(), @@ -333,11 +328,47 @@ impl Editor { Some(()) } + fn apply_changes_in_range( + &mut self, + range: Range, + cx: &mut ViewContext<'_, Editor>, + ) -> Option<()> { + let (buffer, range, _) = self + .buffer + .read(cx) + .range_to_buffer_ranges(range, cx) + .into_iter() + .next()?; + + buffer.update(cx, |branch_buffer, cx| { + branch_buffer.merge_into_base(Some(range), cx); + }); + + None + } + + pub(crate) fn apply_all_changes(&self, cx: &mut ViewContext) { + let buffers = self.buffer.read(cx).all_buffers(); + for branch_buffer in buffers { + branch_buffer.update(cx, |branch_buffer, cx| { + branch_buffer.merge_into_base(None, cx); + }); + } + } + fn hunk_header_block( &self, hunk: &HoveredHunk, cx: &mut ViewContext<'_, Editor>, ) -> BlockProperties { + let is_branch_buffer = self + .buffer + .read(cx) + .point_to_buffer_offset(hunk.multi_buffer_range.start, cx) + .map_or(false, |(buffer, _, _)| { + buffer.read(cx).diff_base_buffer().is_some() + }); + let border_color = cx.theme().colors().border_variant; let gutter_color = match hunk.status { DiffHunkStatus::Added => cx.theme().status().created, @@ -388,6 +419,10 @@ impl Editor { .pr_6() .size_full() .justify_between() + .border_t_1() + .pl_6() + .pr_6() + .border_color(border_color) .child( h_flex() .gap_1() 
@@ -411,43 +446,10 @@ impl Editor { let hunk = hunk.clone(); move |_event, cx| { editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let position = hunk - .multi_buffer_range - .end - .to_point( - &snapshot.buffer_snapshot, - ); - if let Some(hunk) = editor - .go_to_hunk_after_position( - &snapshot, position, cx, - ) - { - let multi_buffer_start = snapshot - .buffer_snapshot - .anchor_before(Point::new( - hunk.row_range.start.0, - 0, - )); - let multi_buffer_end = snapshot - .buffer_snapshot - .anchor_after(Point::new( - hunk.row_range.end.0, - 0, - )); - editor.expand_diff_hunk( - None, - &HoveredHunk { - multi_buffer_range: - multi_buffer_start - ..multi_buffer_end, - status: hunk_status(&hunk), - diff_base_byte_range: hunk - .diff_base_byte_range, - }, - cx, - ); - } + editor.go_to_subsequent_hunk( + hunk.multi_buffer_range.end, + cx, + ); }); } }), @@ -472,43 +474,10 @@ impl Editor { let hunk = hunk.clone(); move |_event, cx| { editor.update(cx, |editor, cx| { - let snapshot = editor.snapshot(cx); - let position = hunk - .multi_buffer_range - .start - .to_point( - &snapshot.buffer_snapshot, - ); - let hunk = editor - .go_to_hunk_before_position( - &snapshot, position, cx, - ); - if let Some(hunk) = hunk { - let multi_buffer_start = snapshot - .buffer_snapshot - .anchor_before(Point::new( - hunk.row_range.start.0, - 0, - )); - let multi_buffer_end = snapshot - .buffer_snapshot - .anchor_after(Point::new( - hunk.row_range.end.0, - 0, - )); - editor.expand_diff_hunk( - None, - &HoveredHunk { - multi_buffer_range: - multi_buffer_start - ..multi_buffer_end, - status: hunk_status(&hunk), - diff_base_byte_range: hunk - .diff_base_byte_range, - }, - cx, - ); - } + editor.go_to_preceding_hunk( + hunk.multi_buffer_range.start, + cx, + ); }); } }), @@ -558,6 +527,36 @@ impl Editor { } }), ) + .when(is_branch_buffer, |this| { + this.child( + IconButton::new("apply", IconName::Check) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + 
.tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Apply Hunk", + &ApplyDiffHunk, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.apply_changes_in_range( + hunk.multi_buffer_range.clone(), + cx, + ); + }); + } + }), + ) + }) .child({ let focus = editor.focus_handle(cx); PopoverMenu::new("hunk-controls-dropdown") @@ -597,31 +596,29 @@ impl Editor { }), ) .child( - div().child( - IconButton::new("collapse", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Collapse Hunk", - &ToggleHunkDiff, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); - }); - } - }), - ), + IconButton::new("collapse", IconName::Close) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Collapse Hunk", + &ToggleHunkDiff, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), ), ) .into_any_element() @@ -876,6 +873,51 @@ impl Editor { } }) } + + fn go_to_subsequent_hunk(&mut self, position: Anchor, cx: &mut ViewContext) { + let snapshot = self.snapshot(cx); + let position = position.to_point(&snapshot.buffer_snapshot); + if let Some(hunk) = self.go_to_hunk_after_position(&snapshot, position, cx) { + let multi_buffer_start = snapshot + .buffer_snapshot + .anchor_before(Point::new(hunk.row_range.start.0, 0)); + let multi_buffer_end = snapshot + .buffer_snapshot + 
.anchor_after(Point::new(hunk.row_range.end.0, 0)); + self.expand_diff_hunk( + None, + &HoveredHunk { + multi_buffer_range: multi_buffer_start..multi_buffer_end, + status: hunk_status(&hunk), + diff_base_byte_range: hunk.diff_base_byte_range, + }, + cx, + ); + } + } + + fn go_to_preceding_hunk(&mut self, position: Anchor, cx: &mut ViewContext) { + let snapshot = self.snapshot(cx); + let position = position.to_point(&snapshot.buffer_snapshot); + let hunk = self.go_to_hunk_before_position(&snapshot, position, cx); + if let Some(hunk) = hunk { + let multi_buffer_start = snapshot + .buffer_snapshot + .anchor_before(Point::new(hunk.row_range.start.0, 0)); + let multi_buffer_end = snapshot + .buffer_snapshot + .anchor_after(Point::new(hunk.row_range.end.0, 0)); + self.expand_diff_hunk( + None, + &HoveredHunk { + multi_buffer_range: multi_buffer_start..multi_buffer_end, + status: hunk_status(&hunk), + diff_base_byte_range: hunk.diff_base_byte_range, + }, + cx, + ); + } + } } fn to_diff_hunk( diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 0666346e487764..62e37bc677f5fc 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -18,7 +18,7 @@ pub struct ProposedChangesEditor { editor: View, _subscriptions: Vec, _recalculate_diffs_task: Task>, - recalculate_diffs_tx: mpsc::UnboundedSender>, + recalculate_diffs_tx: mpsc::UnboundedSender, } pub struct ProposedChangesBuffer { @@ -30,6 +30,11 @@ pub struct ProposedChangesEditorToolbar { current_editor: Option>, } +struct RecalculateDiff { + buffer: Model, + debounce: bool, +} + impl ProposedChangesEditor { pub fn new( buffers: Vec>, @@ -63,16 +68,18 @@ impl ProposedChangesEditor { recalculate_diffs_tx, _recalculate_diffs_task: cx.spawn(|_, mut cx| async move { let mut buffers_to_diff = HashSet::default(); - while let Some(buffer) = recalculate_diffs_rx.next().await { - buffers_to_diff.insert(buffer); + while let 
Some(mut recalculate_diff) = recalculate_diffs_rx.next().await { + buffers_to_diff.insert(recalculate_diff.buffer); - loop { + while recalculate_diff.debounce { cx.background_executor() .timer(Duration::from_millis(250)) .await; let mut had_further_changes = false; - while let Ok(next_buffer) = recalculate_diffs_rx.try_next() { - buffers_to_diff.insert(next_buffer?); + while let Ok(next_recalculate_diff) = recalculate_diffs_rx.try_next() { + let next_recalculate_diff = next_recalculate_diff?; + recalculate_diff.debounce &= next_recalculate_diff.debounce; + buffers_to_diff.insert(next_recalculate_diff.buffer); had_further_changes = true; } if !had_further_changes { @@ -99,19 +106,24 @@ impl ProposedChangesEditor { event: &BufferEvent, _cx: &mut ViewContext, ) { - if let BufferEvent::Edited = event { - self.recalculate_diffs_tx.unbounded_send(buffer).ok(); - } - } - - fn apply_all_changes(&self, cx: &mut ViewContext) { - let buffers = self.editor.read(cx).buffer.read(cx).all_buffers(); - for branch_buffer in buffers { - if let Some(base_buffer) = branch_buffer.read(cx).diff_base_buffer() { - base_buffer.update(cx, |base_buffer, cx| { - base_buffer.merge(&branch_buffer, None, cx) - }); + match event { + BufferEvent::Operation { .. 
} => { + self.recalculate_diffs_tx + .unbounded_send(RecalculateDiff { + buffer, + debounce: true, + }) + .ok(); + } + BufferEvent::DiffBaseChanged => { + self.recalculate_diffs_tx + .unbounded_send(RecalculateDiff { + buffer, + debounce: false, + }) + .ok(); } + _ => (), } } } @@ -208,7 +220,9 @@ impl Render for ProposedChangesEditorToolbar { Button::new("apply-changes", "Apply All").on_click(move |_, cx| { if let Some(editor) = &editor { editor.update(cx, |editor, cx| { - editor.apply_all_changes(cx); + editor.editor.update(cx, |editor, cx| { + editor.apply_all_changes(cx); + }) }); } }) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 7abc9b8dba146a..8afc4d389db7f5 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -62,7 +62,7 @@ pub use text::{ use theme::SyntaxTheme; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; -use util::RangeExt; +use util::{debug_panic, RangeExt}; #[cfg(any(test, feature = "test-support"))] pub use {tree_sitter_rust, tree_sitter_typescript}; @@ -823,40 +823,41 @@ impl Buffer { }) } - /// Applies all of the changes in `branch` buffer that intersect the given `range` - /// to this buffer. - pub fn merge( - &mut self, - branch: &Model, - range: Option>, - cx: &mut ModelContext, - ) { - let edits = branch.read_with(cx, |branch, _| { - branch - .edits_since_in_range::( - &self.version, - range.unwrap_or(Anchor::MIN..Anchor::MAX), - ) - .map(|edit| { - ( - edit.old, - branch.text_for_range(edit.new).collect::(), - ) + /// Applies all of the changes in this buffer that intersect the given `range` + /// to its base buffer. This buffer must be a branch buffer to call this method. 
+ pub fn merge_into_base(&mut self, range: Option>, cx: &mut ModelContext) { + let Some(base_buffer) = self.diff_base_buffer() else { + debug_panic!("not a branch buffer"); + return; + }; + + base_buffer.update(cx, |base_buffer, cx| { + let edits = self + .edits_since::(&base_buffer.version) + .filter_map(|edit| { + if range + .as_ref() + .map_or(true, |range| range.overlaps(&edit.new)) + { + Some((edit.old, self.text_for_range(edit.new).collect::())) + } else { + None + } }) - .collect::>() - }); - let operation = self.edit(edits, None, cx); + .collect::>(); + + let operation = base_buffer.edit(edits, None, cx); - // Prevent this operation from being reapplied to the branch. - branch.update(cx, |branch, cx| { + // Prevent this operation from being reapplied to the branch. if let Some(BufferDiffBase::PastBufferVersion { operations_to_ignore, .. - }) = &mut branch.diff_base + }) = &mut self.diff_base { operations_to_ignore.extend(operation); } - cx.emit(BufferEvent::Edited) + + cx.emit(BufferEvent::DiffBaseChanged); }); } diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 49cc31067b93ae..da53d5a7637b99 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -2471,8 +2471,8 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { }); // Merging the branch applies all of its changes to the base. 
- base_buffer.update(cx, |base_buffer, cx| { - base_buffer.merge(&branch_buffer, None, cx); + branch_buffer.update(cx, |branch_buffer, cx| { + branch_buffer.merge_into_base(None, cx); }); branch_buffer.update(cx, |branch_buffer, cx| { @@ -2484,6 +2484,18 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_merge_into_base(cx: &mut AppContext) { + init_settings(cx, |_| {}); + let base = cx.new_model(|cx| Buffer::local("abcdefghijk", cx)); + let branch = base.update(cx, |buffer, cx| buffer.branch(cx)); + branch.update(cx, |branch, cx| { + branch.edit([(0..3, "ABC"), (7..9, "HI")], None, cx); + branch.merge_into_base(Some(5..8), cx); + }); + assert_eq!(base.read(cx).text(), "abcdefgHIjk"); +} + fn start_recalculating_diff(buffer: &Model, cx: &mut TestAppContext) { buffer .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) From 9b148f3dcc5e4281bfadd515efd817bcdbf21bc3 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Wed, 2 Oct 2024 01:32:31 +0800 Subject: [PATCH 428/762] Limit the value can be set for font weight (#18594) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #18531 This PR limits the range of values that can be set for `FontWeight`. Since any value less than 1.0 or greater than 999.9 causes Zed to crash on Windows, I’ve restricted `FontWeight` to this range. I could apply this constraint only on Windows, but considering the documentation at https://zed.dev/docs/configuring-zed#buffer-font-weight indicates that `FontWeight` should be between 100 and 900, I thought it might be a good idea to apply this restriction in the settings. Release Notes: - Changed `ui_font_weight` and `buffer_font_weight` settings to require values to be between `100` and `950` (inclusive). 
--------- Co-authored-by: Marshall Bowers --- crates/theme/src/settings.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 7fa9a870de559b..86383cec8ea079 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -520,6 +520,10 @@ pub fn reset_ui_font_size(cx: &mut AppContext) { } } +fn clamp_font_weight(weight: f32) -> FontWeight { + FontWeight(weight.clamp(100., 950.)) +} + impl settings::Settings for ThemeSettings { const KEY: Option<&'static str> = None; @@ -579,7 +583,7 @@ impl settings::Settings for ThemeSettings { this.buffer_font.fallbacks = Some(FontFallbacks::from_fonts(value)); } if let Some(value) = value.buffer_font_weight { - this.buffer_font.weight = FontWeight(value); + this.buffer_font.weight = clamp_font_weight(value); } if let Some(value) = value.ui_font_family.clone() { @@ -592,7 +596,7 @@ impl settings::Settings for ThemeSettings { this.ui_font.fallbacks = Some(FontFallbacks::from_fonts(value)); } if let Some(value) = value.ui_font_weight { - this.ui_font.weight = FontWeight(value); + this.ui_font.weight = clamp_font_weight(value); } if let Some(value) = &value.theme { From 7dcb0de28cb3abf482b81e2821332cf234891cda Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Oct 2024 12:58:12 -0600 Subject: [PATCH 429/762] Keep all hunks expanded in proposed change editor (#18598) Also, fix visual bug when pressing escape with a non-empty selection in a deleted text block. 
Release Notes: - N/A Co-authored-by: Antonio --- crates/editor/src/editor.rs | 2 +- crates/editor/src/hunk_diff.rs | 440 +++++++++++-------- crates/editor/src/proposed_changes_editor.rs | 7 +- 3 files changed, 259 insertions(+), 190 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b43433e3f41e6e..54a1318bdfe52c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3059,7 +3059,7 @@ impl Editor { } pub fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext) { - if self.clear_clicked_diff_hunks(cx) { + if self.clear_expanded_diff_hunks(cx) { cx.notify(); return; } diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index c8caa30b59c49c..ff3451fc9216b0 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -32,6 +32,7 @@ pub(super) struct ExpandedHunks { pub(crate) hunks: Vec, diff_base: HashMap, hunk_update_tasks: HashMap, Task<()>>, + expand_all: bool, } #[derive(Debug, Clone)] @@ -72,6 +73,10 @@ impl ExpandedHunks { } impl Editor { + pub fn set_expand_all_diff_hunks(&mut self) { + self.expanded_hunks.expand_all = true; + } + pub(super) fn toggle_hovered_hunk( &mut self, hovered_hunk: &HoveredHunk, @@ -133,6 +138,10 @@ impl Editor { hunks_to_toggle: Vec, cx: &mut ViewContext, ) { + if self.expanded_hunks.expand_all { + return; + } + let previous_toggle_task = self.expanded_hunks.hunk_update_tasks.remove(&None); let new_toggle_task = cx.spawn(move |editor, mut cx| async move { if let Some(task) = previous_toggle_task { @@ -426,62 +435,64 @@ impl Editor { .child( h_flex() .gap_1() - .child( - IconButton::new("next-hunk", IconName::ArrowDown) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Next Hunk", - &GoToHunk, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, 
cx| { - editor.update(cx, |editor, cx| { - editor.go_to_subsequent_hunk( - hunk.multi_buffer_range.end, + .when(!is_branch_buffer, |row| { + row.child( + IconButton::new("next-hunk", IconName::ArrowDown) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Next Hunk", + &GoToHunk, + &focus_handle, cx, - ); - }); - } - }), - ) - .child( - IconButton::new("prev-hunk", IconName::ArrowUp) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Previous Hunk", - &GoToPrevHunk, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.go_to_preceding_hunk( - hunk.multi_buffer_range.start, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.go_to_subsequent_hunk( + hunk.multi_buffer_range.end, + cx, + ); + }); + } + }), + ) + .child( + IconButton::new("prev-hunk", IconName::ArrowUp) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Previous Hunk", + &GoToPrevHunk, + &focus_handle, cx, - ); - }); - } - }), - ) + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.go_to_preceding_hunk( + hunk.multi_buffer_range.start, + cx, + ); + }); + } + }), + ) + }) .child( IconButton::new("discard", IconName::Undo) .shape(IconButtonShape::Square) @@ -527,99 +538,115 @@ impl Editor { } }), ) - .when(is_branch_buffer, |this| { - this.child( - IconButton::new("apply", IconName::Check) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - 
.tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Apply Hunk", - &ApplyDiffHunk, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.apply_changes_in_range( - hunk.multi_buffer_range.clone(), + .map(|this| { + if is_branch_buffer { + this.child( + IconButton::new("apply", IconName::Check) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = + editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Apply Hunk", + &ApplyDiffHunk, + &focus_handle, cx, - ); - }); - } - }), - ) - }) - .child({ - let focus = editor.focus_handle(cx); - PopoverMenu::new("hunk-controls-dropdown") - .trigger( - IconButton::new( - "toggle_editor_selections_icon", - IconName::EllipsisVertical, - ) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .style(ButtonStyle::Subtle) - .selected( - hunk_controls_menu_handle.is_deployed(), - ) - .when( - !hunk_controls_menu_handle.is_deployed(), - |this| { - this.tooltip(|cx| { - Tooltip::text("Hunk Controls", cx) - }) - }, - ), + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.apply_changes_in_range( + hunk.multi_buffer_range + .clone(), + cx, + ); + }); + } + }), ) - .anchor(AnchorCorner::TopRight) - .with_handle(hunk_controls_menu_handle) - .menu(move |cx| { - let focus = focus.clone(); - let menu = - ContextMenu::build(cx, move |menu, _| { - menu.context(focus.clone()).action( - "Discard All", - RevertFile.boxed_clone(), + } else { + this.child({ + let focus = editor.focus_handle(cx); + PopoverMenu::new("hunk-controls-dropdown") + .trigger( + IconButton::new( + "toggle_editor_selections_icon", + IconName::EllipsisVertical, ) - }); - Some(menu) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + 
.style(ButtonStyle::Subtle) + .selected( + hunk_controls_menu_handle + .is_deployed(), + ) + .when( + !hunk_controls_menu_handle + .is_deployed(), + |this| { + this.tooltip(|cx| { + Tooltip::text( + "Hunk Controls", + cx, + ) + }) + }, + ), + ) + .anchor(AnchorCorner::TopRight) + .with_handle(hunk_controls_menu_handle) + .menu(move |cx| { + let focus = focus.clone(); + let menu = ContextMenu::build( + cx, + move |menu, _| { + menu.context(focus.clone()) + .action( + "Discard All", + RevertFile + .boxed_clone(), + ) + }, + ); + Some(menu) + }) }) - }), - ) - .child( - IconButton::new("collapse", IconName::Close) - .shape(IconButtonShape::Square) - .icon_size(IconSize::Small) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |cx| { - Tooltip::for_action_in( - "Collapse Hunk", - &ToggleHunkDiff, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let editor = editor.clone(); - let hunk = hunk.clone(); - move |_event, cx| { - editor.update(cx, |editor, cx| { - editor.toggle_hovered_hunk(&hunk, cx); - }); } }), - ), + ) + .when(!is_branch_buffer, |div| { + div.child( + IconButton::new("collapse", IconName::Close) + .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) + .tooltip({ + let focus_handle = editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Collapse Hunk", + &ToggleHunkDiff, + &focus_handle, + cx, + ) + } + }) + .on_click({ + let editor = editor.clone(); + let hunk = hunk.clone(); + move |_event, cx| { + editor.update(cx, |editor, cx| { + editor.toggle_hovered_hunk(&hunk, cx); + }); + } + }), + ) + }), ) .into_any_element() } @@ -694,7 +721,10 @@ impl Editor { } } - pub(super) fn clear_clicked_diff_hunks(&mut self, cx: &mut ViewContext<'_, Editor>) -> bool { + pub(super) fn clear_expanded_diff_hunks(&mut self, cx: &mut ViewContext<'_, Editor>) -> bool { + if self.expanded_hunks.expand_all { + return false; + } self.expanded_hunks.hunk_update_tasks.clear(); self.clear_row_highlights::(); let to_remove = self @@ -798,33 
+828,43 @@ impl Editor { status, } => { let hunk_display_range = display_row_range; + if expanded_hunk_display_range.start > hunk_display_range.end { recalculated_hunks.next(); - continue; - } else if expanded_hunk_display_range.end - < hunk_display_range.start - { - break; - } else { - if !expanded_hunk.folded - && expanded_hunk_display_range == hunk_display_range - && expanded_hunk.status == hunk_status(buffer_hunk) - && expanded_hunk.diff_base_byte_range - == buffer_hunk.diff_base_byte_range - { - recalculated_hunks.next(); - retain = true; - } else { + if editor.expanded_hunks.expand_all { hunks_to_reexpand.push(HoveredHunk { status, multi_buffer_range, diff_base_byte_range, }); } + continue; + } + + if expanded_hunk_display_range.end + < hunk_display_range.start + { break; } + + if !expanded_hunk.folded + && expanded_hunk_display_range == hunk_display_range + && expanded_hunk.status == hunk_status(buffer_hunk) + && expanded_hunk.diff_base_byte_range + == buffer_hunk.diff_base_byte_range + { + recalculated_hunks.next(); + retain = true; + } else { + hunks_to_reexpand.push(HoveredHunk { + status, + multi_buffer_range, + diff_base_byte_range, + }); + } + break; } } } @@ -836,6 +876,26 @@ impl Editor { retain }); + if editor.expanded_hunks.expand_all { + for hunk in recalculated_hunks { + match diff_hunk_to_display(&hunk, &snapshot) { + DisplayDiffHunk::Folded { .. } => {} + DisplayDiffHunk::Unfolded { + diff_base_byte_range, + multi_buffer_range, + status, + .. 
+ } => { + hunks_to_reexpand.push(HoveredHunk { + status, + multi_buffer_range, + diff_base_byte_range, + }); + } + } + } + } + editor.remove_highlighted_rows::(highlights_to_remove, cx); editor.remove_blocks(blocks_to_remove, None, cx); @@ -1000,13 +1060,15 @@ fn editor_with_deleted_text( editor.scroll_manager.set_forbid_vertical_scroll(true); editor.set_read_only(true); editor.set_show_inline_completions(Some(false), cx); - editor.highlight_rows::( + + enum DeletedBlockRowHighlight {} + editor.highlight_rows::( Anchor::min()..Anchor::max(), deleted_color, false, cx, ); - editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); + editor.set_current_line_highlight(Some(CurrentLineHighlight::None)); // editor ._subscriptions .extend([cx.on_blur(&editor.focus_handle, |editor, cx| { @@ -1015,37 +1077,41 @@ fn editor_with_deleted_text( }); })]); - let parent_editor_for_reverts = parent_editor.clone(); let original_multi_buffer_range = hunk.multi_buffer_range.clone(); let diff_base_range = hunk.diff_base_byte_range.clone(); editor - .register_action::(move |_, cx| { - parent_editor_for_reverts - .update(cx, |editor, cx| { - let Some((buffer, original_text)) = - editor.buffer().update(cx, |buffer, cx| { - let (_, buffer, _) = buffer - .excerpt_containing(original_multi_buffer_range.start, cx)?; - let original_text = - buffer.read(cx).diff_base()?.slice(diff_base_range.clone()); - Some((buffer, Arc::from(original_text.to_string()))) - }) - else { - return; - }; - buffer.update(cx, |buffer, cx| { - buffer.edit( - Some(( - original_multi_buffer_range.start.text_anchor - ..original_multi_buffer_range.end.text_anchor, - original_text, - )), - None, - cx, - ) - }); - }) - .ok(); + .register_action::({ + let parent_editor = parent_editor.clone(); + move |_, cx| { + parent_editor + .update(cx, |editor, cx| { + let Some((buffer, original_text)) = + editor.buffer().update(cx, |buffer, cx| { + let (_, buffer, _) = buffer.excerpt_containing( + 
original_multi_buffer_range.start, + cx, + )?; + let original_text = + buffer.read(cx).diff_base()?.slice(diff_base_range.clone()); + Some((buffer, Arc::from(original_text.to_string()))) + }) + else { + return; + }; + buffer.update(cx, |buffer, cx| { + buffer.edit( + Some(( + original_multi_buffer_range.start.text_anchor + ..original_multi_buffer_range.end.text_anchor, + original_text, + )), + None, + cx, + ) + }); + }) + .ok(); + } }) .detach(); let hunk = hunk.clone(); diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 62e37bc677f5fc..8c8aa710a2f72c 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -63,8 +63,11 @@ impl ProposedChangesEditor { let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded(); Self { - editor: cx - .new_view(|cx| Editor::for_multibuffer(multibuffer.clone(), project, true, cx)), + editor: cx.new_view(|cx| { + let mut editor = Editor::for_multibuffer(multibuffer.clone(), project, true, cx); + editor.set_expand_all_diff_hunks(); + editor + }), recalculate_diffs_tx, _recalculate_diffs_task: cx.spawn(|_, mut cx| async move { let mut buffers_to_diff = HashSet::default(); From 563a1dcbab67b58a6c9cf10fe4dea35d862376d1 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Oct 2024 12:58:21 -0600 Subject: [PATCH 430/762] Fix panic when opening proposed changes editor with reversed ranges (#18599) Closes https://github.com/zed-industries/zed/issues/18589 Release Notes: - N/A Co-authored-by: Antonio --- crates/editor/src/editor.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 54a1318bdfe52c..ddc7de4e41a464 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12257,12 +12257,9 @@ impl Editor { let buffer = self.buffer.read(cx); let mut new_selections_by_buffer = HashMap::default(); for selection in 
self.selections.all::(cx) { - for (buffer, mut range, _) in + for (buffer, range, _) in buffer.range_to_buffer_ranges(selection.start..selection.end, cx) { - if selection.reversed { - mem::swap(&mut range.start, &mut range.end); - } let mut range = range.to_point(buffer.read(cx)); range.start.column = 0; range.end.column = buffer.read(cx).line_len(range.end.row); From e80cbab93f0b30ce6619f9b37636410bc8caab5d Mon Sep 17 00:00:00 2001 From: Roman Zipp Date: Wed, 2 Oct 2024 13:03:23 +0200 Subject: [PATCH 431/762] Fix docs `format_on_save` value is not a boolean (#18619) Fixed [Configuring Languages](https://zed.dev/docs/configuring-languages) docs using boolean value for `format_on_save` option although it accepts string values of `"on"` or `"off"` Details: The documentation on [configuring languages](https://zed.dev/docs/configuring-languages) states the use of boolean values for the `format_on_save` option although the [configuration reference](https://zed.dev/docs/configuring-zed#format-on-save) only allows the usage of string values `"on"` or `"off"`. In fact using boolean values will not work and won't translate to `on` or `off` Release Notes: - N/A --- docs/src/configuring-languages.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md index 65e9f822b94786..fc6de9efeab630 100644 --- a/docs/src/configuring-languages.md +++ b/docs/src/configuring-languages.md @@ -33,7 +33,7 @@ Here's an example of language-specific settings: "Python": { "tab_size": 4, "formatter": "language_server", - "format_on_save": true + "format_on_save": "on" }, "JavaScript": { "tab_size": 2, @@ -209,11 +209,11 @@ Zed supports both built-in and external formatters. 
Configure formatters globall "arguments": ["--stdin-filepath", "{buffer_path}"] } }, - "format_on_save": true + "format_on_save": "on" }, "Rust": { "formatter": "language_server", - "format_on_save": true + "format_on_save": "on" } } ``` @@ -225,7 +225,7 @@ To disable formatting for a specific language: ```json "languages": { "Markdown": { - "format_on_save": false + "format_on_save": "off" } } ``` @@ -276,7 +276,7 @@ Zed allows you to run both formatting and linting on save. Here's an example tha "code_actions_on_format": { "source.fixAll.eslint": true }, - "format_on_save": true + "format_on_save": "on" } } ``` From b3cdd2ccff82d0a3d2e33171e22838402f8f3f79 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 2 Oct 2024 13:21:19 +0200 Subject: [PATCH 432/762] ssh remoting: Fix ssh process not being cleaned up when connection is closed (#18623) We introduced a memory leak in #18572, which meant that `Drop` was never called on `SshRemoteConnection`, meaning that the ssh process kept running Co-Authored-by: Thorsten Release Notes: - N/A --------- Co-authored-by: Thorsten --- crates/remote/src/ssh_session.rs | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index fe1e42fe966309..6bca9938baac71 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -31,7 +31,7 @@ use std::{ path::{Path, PathBuf}, sync::{ atomic::{AtomicU32, Ordering::SeqCst}, - Arc, + Arc, Weak, }, time::Instant, }; @@ -244,12 +244,12 @@ struct SshRemoteClientState { ssh_connection: SshRemoteConnection, delegate: Arc, forwarder: ChannelForwarder, - _multiplex_task: Task>, + multiplex_task: Task>, } pub struct SshRemoteClient { client: Arc, - inner_state: Arc>>, + inner_state: Mutex>, } impl SshRemoteClient { @@ -264,7 +264,7 @@ impl SshRemoteClient { let client = cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx))?; let this = 
Arc::new(Self { client, - inner_state: Arc::new(Mutex::new(None)), + inner_state: Mutex::new(None), }); let inner_state = { @@ -276,7 +276,7 @@ impl SshRemoteClient { .await?; let multiplex_task = Self::multiplex( - this.clone(), + Arc::downgrade(&this), ssh_process, proxy_incoming_tx, proxy_outgoing_rx, @@ -287,7 +287,7 @@ impl SshRemoteClient { ssh_connection, delegate, forwarder: proxy, - _multiplex_task: multiplex_task, + multiplex_task, } }; @@ -305,9 +305,9 @@ impl SshRemoteClient { mut ssh_connection, delegate, forwarder: proxy, - _multiplex_task, + multiplex_task, } = state; - drop(_multiplex_task); + drop(multiplex_task); cx.spawn(|mut cx| async move { let (incoming_tx, outgoing_rx) = proxy.into_channels().await; @@ -331,8 +331,8 @@ impl SshRemoteClient { ssh_connection, delegate, forwarder: proxy, - _multiplex_task: Self::multiplex( - this.clone(), + multiplex_task: Self::multiplex( + Arc::downgrade(&this), ssh_process, proxy_incoming_tx, proxy_outgoing_rx, @@ -349,7 +349,7 @@ impl SshRemoteClient { } fn multiplex( - this: Arc, + this: Weak, mut ssh_process: Child, incoming_tx: UnboundedSender, mut outgoing_rx: UnboundedReceiver, @@ -444,7 +444,9 @@ impl SshRemoteClient { if let Err(error) = result { log::warn!("ssh io task died with error: {:?}. 
reconnecting...", error); - Self::reconnect(this, &mut cx).ok(); + if let Some(this) = this.upgrade() { + Self::reconnect(this, &mut cx).ok(); + } } Ok(()) @@ -516,7 +518,7 @@ impl SshRemoteClient { let client = ChannelClient::new(server_to_client_rx, client_to_server_tx, cx); Arc::new(Self { client, - inner_state: Arc::new(Mutex::new(None)), + inner_state: Mutex::new(None), }) }), server_cx.update(|cx| ChannelClient::new(client_to_server_rx, server_to_client_tx, cx)), From 0ee1d7ab2634521e11cb7f221074b0287763359d Mon Sep 17 00:00:00 2001 From: loczek <30776250+loczek@users.noreply.github.com> Date: Wed, 2 Oct 2024 13:27:16 +0200 Subject: [PATCH 433/762] Add snippet commands (#18453) Closes #17860 Closes #15403 Release Notes: - Added `snippets: configure snippets` command to create and modify snippets - Added `snippets: open folder` command for opening the `~/.config/zed/snippets` directory https://github.com/user-attachments/assets/fd9e664c-44b1-49bf-87a8-42b9e516f12f --- Cargo.lock | 15 ++ Cargo.toml | 2 + crates/snippets_ui/Cargo.toml | 22 +++ crates/snippets_ui/LICENSE-GPL | 1 + crates/snippets_ui/src/snippets_ui.rs | 226 ++++++++++++++++++++++++++ crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 1 + 7 files changed, 268 insertions(+) create mode 100644 crates/snippets_ui/Cargo.toml create mode 120000 crates/snippets_ui/LICENSE-GPL create mode 100644 crates/snippets_ui/src/snippets_ui.rs diff --git a/Cargo.lock b/Cargo.lock index 123141d188e0eb..7c92ef0f5257c7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10500,6 +10500,20 @@ dependencies = [ "util", ] +[[package]] +name = "snippets_ui" +version = "0.1.0" +dependencies = [ + "fuzzy", + "gpui", + "language", + "paths", + "picker", + "ui", + "util", + "workspace", +] + [[package]] name = "socket2" version = "0.4.10" @@ -14468,6 +14482,7 @@ dependencies = [ "simplelog", "smol", "snippet_provider", + "snippets_ui", "supermaven", "sysinfo", "tab_switcher", diff --git a/Cargo.toml b/Cargo.toml index 
c72fec020fe678..1ef14dae70c202 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -99,6 +99,7 @@ members = [ "crates/settings_ui", "crates/snippet", "crates/snippet_provider", + "crates/snippets_ui", "crates/sqlez", "crates/sqlez_macros", "crates/story", @@ -275,6 +276,7 @@ settings = { path = "crates/settings" } settings_ui = { path = "crates/settings_ui" } snippet = { path = "crates/snippet" } snippet_provider = { path = "crates/snippet_provider" } +snippets_ui = { path = "crates/snippets_ui" } sqlez = { path = "crates/sqlez" } sqlez_macros = { path = "crates/sqlez_macros" } story = { path = "crates/story" } diff --git a/crates/snippets_ui/Cargo.toml b/crates/snippets_ui/Cargo.toml new file mode 100644 index 00000000000000..da9eff4ae55bad --- /dev/null +++ b/crates/snippets_ui/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "snippets_ui" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/snippets_ui.rs" + +[dependencies] +fuzzy.workspace = true +gpui.workspace = true +language.workspace = true +paths.workspace = true +picker.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true diff --git a/crates/snippets_ui/LICENSE-GPL b/crates/snippets_ui/LICENSE-GPL new file mode 120000 index 00000000000000..89e542f750cd38 --- /dev/null +++ b/crates/snippets_ui/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/snippets_ui/src/snippets_ui.rs b/crates/snippets_ui/src/snippets_ui.rs new file mode 100644 index 00000000000000..c8ab6febdaa076 --- /dev/null +++ b/crates/snippets_ui/src/snippets_ui.rs @@ -0,0 +1,226 @@ +use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; +use gpui::{ + actions, AppContext, DismissEvent, EventEmitter, FocusableView, ParentElement, Render, Styled, + View, ViewContext, VisualContext, WeakView, +}; +use language::LanguageRegistry; +use paths::config_dir; +use picker::{Picker, PickerDelegate}; 
+use std::{borrow::Borrow, fs, sync::Arc}; +use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing, WindowContext}; +use util::ResultExt; +use workspace::{notifications::NotifyResultExt, ModalView, Workspace}; + +actions!(snippets, [ConfigureSnippets, OpenFolder]); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(register).detach(); +} + +fn register(workspace: &mut Workspace, _: &mut ViewContext) { + workspace.register_action(configure_snippets); + workspace.register_action(open_folder); +} + +fn configure_snippets( + workspace: &mut Workspace, + _: &ConfigureSnippets, + cx: &mut ViewContext, +) { + let language_registry = workspace.app_state().languages.clone(); + let workspace_handle = workspace.weak_handle(); + + workspace.toggle_modal(cx, move |cx| { + ScopeSelector::new(language_registry, workspace_handle, cx) + }); +} + +fn open_folder(workspace: &mut Workspace, _: &OpenFolder, cx: &mut ViewContext) { + fs::create_dir_all(config_dir().join("snippets")).notify_err(workspace, cx); + cx.open_with_system(config_dir().join("snippets").borrow()); +} + +pub struct ScopeSelector { + picker: View>, +} + +impl ScopeSelector { + fn new( + language_registry: Arc, + workspace: WeakView, + cx: &mut ViewContext, + ) -> Self { + let delegate = + ScopeSelectorDelegate::new(workspace, cx.view().downgrade(), language_registry); + + let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx)); + + Self { picker } + } +} + +impl ModalView for ScopeSelector {} + +impl EventEmitter for ScopeSelector {} + +impl FocusableView for ScopeSelector { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl Render for ScopeSelector { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + v_flex().w(rems(34.)).child(self.picker.clone()) + } +} + +pub struct ScopeSelectorDelegate { + workspace: WeakView, + scope_selector: WeakView, + language_registry: Arc, + candidates: Vec, + matches: Vec, + 
selected_index: usize, +} + +impl ScopeSelectorDelegate { + fn new( + workspace: WeakView, + scope_selector: WeakView, + language_registry: Arc, + ) -> Self { + let candidates = Vec::from(["Global".to_string()]).into_iter(); + let languages = language_registry.language_names().into_iter(); + + let candidates = candidates + .chain(languages) + .enumerate() + .map(|(candidate_id, name)| StringMatchCandidate::new(candidate_id, name)) + .collect::>(); + + Self { + workspace, + scope_selector, + language_registry, + candidates, + matches: vec![], + selected_index: 0, + } + } +} + +impl PickerDelegate for ScopeSelectorDelegate { + type ListItem = ListItem; + + fn placeholder_text(&self, _: &mut WindowContext) -> Arc { + "Select snippet scope...".into() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn confirm(&mut self, _: bool, cx: &mut ViewContext>) { + if let Some(mat) = self.matches.get(self.selected_index) { + let scope_name = self.candidates[mat.candidate_id].string.clone(); + let language = self.language_registry.language_for_name(&scope_name); + + if let Some(workspace) = self.workspace.upgrade() { + cx.spawn(|_, mut cx| async move { + let scope = match scope_name.as_str() { + "Global" => "snippets".to_string(), + _ => language.await?.lsp_id(), + }; + + workspace.update(&mut cx, |workspace, cx| { + workspace + .open_abs_path( + config_dir().join("snippets").join(scope + ".json"), + false, + cx, + ) + .detach(); + }) + }) + .detach_and_log_err(cx); + }; + } + self.dismissed(cx); + } + + fn dismissed(&mut self, cx: &mut ViewContext>) { + self.scope_selector + .update(cx, |_, cx| cx.emit(DismissEvent)) + .log_err(); + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext>) { + self.selected_index = ix; + } + + fn update_matches( + &mut self, + query: String, + cx: &mut ViewContext>, + ) -> gpui::Task<()> { + let background = cx.background_executor().clone(); + 
let candidates = self.candidates.clone(); + cx.spawn(|this, mut cx| async move { + let matches = if query.is_empty() { + candidates + .into_iter() + .enumerate() + .map(|(index, candidate)| StringMatch { + candidate_id: index, + string: candidate.string, + positions: Vec::new(), + score: 0.0, + }) + .collect() + } else { + match_strings( + &candidates, + &query, + false, + 100, + &Default::default(), + background, + ) + .await + }; + + this.update(&mut cx, |this, cx| { + let delegate = &mut this.delegate; + delegate.matches = matches; + delegate.selected_index = delegate + .selected_index + .min(delegate.matches.len().saturating_sub(1)); + cx.notify(); + }) + .log_err(); + }) + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _: &mut ViewContext>, + ) -> Option { + let mat = &self.matches[ix]; + let label = mat.string.clone(); + + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .selected(selected) + .child(HighlightedLabel::new(label, mat.positions.clone())), + ) + } +} diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 5422f8b29aa98d..775a59e475cb47 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -96,6 +96,7 @@ shellexpand.workspace = true simplelog.workspace = true smol.workspace = true snippet_provider.workspace = true +snippets_ui.workspace = true supermaven.workspace = true sysinfo.workspace = true tab_switcher.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 186805d12cd01d..06f1d926aea37d 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -256,6 +256,7 @@ fn init_ui( project_panel::init(Assets, cx); outline_panel::init(Assets, cx); tasks_ui::init(cx); + snippets_ui::init(cx); channel::init(&app_state.client.clone(), app_state.user_store.clone(), cx); search::init(cx); vim::init(cx); From fd94c2b3fd5ceac67ff852d1781b4c707f5ed487 Mon Sep 17 00:00:00 2001 From: Patrick <39161540+patrickJramos@users.noreply.github.com> Date: Wed, 2 
Oct 2024 08:44:42 -0300 Subject: [PATCH 434/762] Keep tab position when closing tabs (#18168) - Closes #18036 Release Notes: - N/A --- crates/workspace/src/pane.rs | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 82300690e7dbe1..3e228b7b1602b3 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1407,17 +1407,13 @@ impl Pane { self.pinned_tab_count -= 1; } if item_index == self.active_item_index { - let index_to_activate = self - .activation_history - .pop() - .and_then(|last_activated_item| { - self.items.iter().enumerate().find_map(|(index, item)| { - (item.item_id() == last_activated_item.entity_id).then_some(index) - }) - }) - // We didn't have a valid activation history entry, so fallback - // to activating the item to the left - .unwrap_or_else(|| item_index.min(self.items.len()).saturating_sub(1)); + self.activation_history.pop(); + + let index_to_activate = if item_index + 1 < self.items.len() { + item_index + 1 + } else { + item_index.saturating_sub(1) + }; let should_activate = activate_pane || self.has_focus(cx); if self.items.len() == 1 && should_activate { @@ -3320,7 +3316,7 @@ mod tests { .unwrap() .await .unwrap(); - assert_item_labels(&pane, ["A", "B*", "C", "D"], cx); + assert_item_labels(&pane, ["A", "B", "C*", "D"], cx); pane.update(cx, |pane, cx| pane.activate_item(3, false, false, cx)); assert_item_labels(&pane, ["A", "B", "C", "D*"], cx); @@ -3331,7 +3327,7 @@ mod tests { .unwrap() .await .unwrap(); - assert_item_labels(&pane, ["A", "B*", "C"], cx); + assert_item_labels(&pane, ["A", "B", "C*"], cx); pane.update(cx, |pane, cx| { pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) @@ -3339,7 +3335,7 @@ mod tests { .unwrap() .await .unwrap(); - assert_item_labels(&pane, ["A", "C*"], cx); + assert_item_labels(&pane, ["A", "B*"], cx); pane.update(cx, |pane, cx| { 
pane.close_active_item(&CloseActiveItem { save_intent: None }, cx) From e01bc6765db7352559d20d3b0f18b124dc4707d8 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 2 Oct 2024 13:45:07 +0200 Subject: [PATCH 435/762] editor: Fix "Reveal in File Manager" not working with multibuffers (#18626) Additionally, mark context menu entry as disabled when the action would fail (untitled buffer, collab sessions). Supersedes #18584 Release Notes: - Fixed "Reveal in Finder/File Manager", "Copy Path", "Copy Relative Path" and "Copy file location" actions not working with multibuffers. --- crates/editor/src/editor.rs | 40 ++++++++++++------------- crates/editor/src/mouse_context_menu.rs | 18 +++++++---- 2 files changed, 33 insertions(+), 25 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ddc7de4e41a464..525a94f2582ab1 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11257,30 +11257,32 @@ impl Editor { None } + fn target_file<'a>(&self, cx: &'a AppContext) -> Option<&'a dyn language::LocalFile> { + self.active_excerpt(cx)? 
+ .1 + .read(cx) + .file() + .and_then(|f| f.as_local()) + } + pub fn reveal_in_finder(&mut self, _: &RevealInFileManager, cx: &mut ViewContext) { - if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - cx.reveal_path(&file.abs_path(cx)); - } + if let Some(target) = self.target_file(cx) { + cx.reveal_path(&target.abs_path(cx)); } } pub fn copy_path(&mut self, _: &CopyPath, cx: &mut ViewContext) { - if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - if let Some(path) = file.abs_path(cx).to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); - } + if let Some(file) = self.target_file(cx) { + if let Some(path) = file.abs_path(cx).to_str() { + cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); } } } pub fn copy_relative_path(&mut self, _: &CopyRelativePath, cx: &mut ViewContext) { - if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - if let Some(path) = file.path().to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); - } + if let Some(file) = self.target_file(cx) { + if let Some(path) = file.path().to_str() { + cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); } } } @@ -11491,12 +11493,10 @@ impl Editor { } pub fn copy_file_location(&mut self, _: &CopyFileLocation, cx: &mut ViewContext) { - if let Some(buffer) = self.buffer().read(cx).as_singleton() { - if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { - if let Some(path) = file.path().to_str() { - let selection = self.selections.newest::(cx).start.row + 1; - cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); - } + if let Some(file) = self.target_file(cx) { + if let Some(path) = file.path().to_str() { + let selection = 
self.selections.newest::(cx).start.row + 1; + cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); } } } diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs index 239d7955a056b4..936d95dccbf8c2 100644 --- a/crates/editor/src/mouse_context_menu.rs +++ b/crates/editor/src/mouse_context_menu.rs @@ -158,6 +158,12 @@ pub fn deploy_context_menu( } let focus = cx.focused(); + let has_reveal_target = editor.target_file(cx).is_some(); + let reveal_in_finder_label = if cfg!(target_os = "macos") { + "Reveal in Finder" + } else { + "Reveal in File Manager" + }; ui::ContextMenu::build(cx, |menu, _cx| { let builder = menu .on_blur_subscription(Subscription::new(|| {})) @@ -180,11 +186,13 @@ pub fn deploy_context_menu( .action("Copy", Box::new(Copy)) .action("Paste", Box::new(Paste)) .separator() - .when(cfg!(target_os = "macos"), |builder| { - builder.action("Reveal in Finder", Box::new(RevealInFileManager)) - }) - .when(cfg!(not(target_os = "macos")), |builder| { - builder.action("Reveal in File Manager", Box::new(RevealInFileManager)) + .map(|builder| { + if has_reveal_target { + builder.action(reveal_in_finder_label, Box::new(RevealInFileManager)) + } else { + builder + .disabled_action(reveal_in_finder_label, Box::new(RevealInFileManager)) + } }) .action("Open in Terminal", Box::new(OpenInTerminal)) .action("Copy Permalink", Box::new(CopyPermalinkToLine)); From 82d3fcdf4b97566357633801df047027240aa286 Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Wed, 2 Oct 2024 09:29:11 -0400 Subject: [PATCH 436/762] Tweak assistant prompt to only fix diagnostic issues when requested to do so (#18596) Release Notes: - Assistant: Make the model less likely to incorporate diagnostic information when not requested to fix any issues. 
![CleanShot 2024-10-01 at 13 44 08](https://github.com/user-attachments/assets/f0e9a132-6cac-4dc6-889f-467e59ec8bbc) --- assets/prompts/content_prompt.hbs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/assets/prompts/content_prompt.hbs b/assets/prompts/content_prompt.hbs index e944e230f56f9b..c029f84b24c36e 100644 --- a/assets/prompts/content_prompt.hbs +++ b/assets/prompts/content_prompt.hbs @@ -50,6 +50,9 @@ And here's the section to rewrite based on that prompt again for reference: {{#if diagnostic_errors}} {{#each diagnostic_errors}} + +Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to. + {{line_number}} {{error_message}} From 8a18c94f33fc36c89016c8f606a20632ce63b472 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 2 Oct 2024 15:35:50 +0200 Subject: [PATCH 437/762] Make slash command descriptions consistent (#18595) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds a description constant in most of the slash command files so that both the editor _and_ footer pickers use the same string. In terms of copywriting, I did some tweaking to reduce the longer ones a bit. Also standardized them all to use sentence case, as opposed to each instance using a different convention. The editor picker needs more work, though, given the arguments and descriptions are being cut at the moment. This should happen in a follow-up! 
Screenshot 2024-10-01 at 7 25 19 PM --- Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- crates/assistant/src/slash_command/auto_command.rs | 4 ++-- crates/assistant/src/slash_command/delta_command.rs | 4 ++-- crates/assistant/src/slash_command/diagnostics_command.rs | 2 +- crates/assistant/src/slash_command/fetch_command.rs | 4 ++-- crates/assistant/src/slash_command/file_command.rs | 4 ++-- crates/assistant/src/slash_command/now_command.rs | 4 ++-- crates/assistant/src/slash_command/project_command.rs | 4 ++-- crates/assistant/src/slash_command/prompt_command.rs | 4 ++-- crates/assistant/src/slash_command/search_command.rs | 4 ++-- crates/assistant/src/slash_command/symbols_command.rs | 4 ++-- crates/assistant/src/slash_command/tab_command.rs | 4 ++-- crates/assistant/src/slash_command/terminal_command.rs | 4 ++-- crates/assistant/src/slash_command/workflow_command.rs | 4 ++-- crates/assistant/src/slash_command_picker.rs | 6 ++++-- 14 files changed, 29 insertions(+), 27 deletions(-) diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs index 14cee296820989..14bbb7c8412b41 100644 --- a/crates/assistant/src/slash_command/auto_command.rs +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -31,11 +31,11 @@ impl SlashCommand for AutoCommand { } fn description(&self) -> String { - "Automatically infer what context to add, based on your prompt".into() + "Automatically infer what context to add".into() } fn menu_text(&self) -> String { - "Automatically Infer Context".into() + self.description() } fn label(&self, cx: &AppContext) -> CodeLabel { diff --git a/crates/assistant/src/slash_command/delta_command.rs b/crates/assistant/src/slash_command/delta_command.rs index 6a66ad3f09aa9a..6f697ecbb9bcba 100644 --- a/crates/assistant/src/slash_command/delta_command.rs +++ b/crates/assistant/src/slash_command/delta_command.rs @@ -19,11 +19,11 @@ impl SlashCommand for DeltaSlashCommand { } fn 
description(&self) -> String { - "re-insert changed files".into() + "Re-insert changed files".into() } fn menu_text(&self) -> String { - "Re-insert Changed Files".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 3f79c016750318..146a4e5d366dd3 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -95,7 +95,7 @@ impl SlashCommand for DiagnosticsSlashCommand { } fn menu_text(&self) -> String { - "Insert Diagnostics".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/fetch_command.rs b/crates/assistant/src/slash_command/fetch_command.rs index 23d3c884a8ec28..3a01bb645a36bb 100644 --- a/crates/assistant/src/slash_command/fetch_command.rs +++ b/crates/assistant/src/slash_command/fetch_command.rs @@ -104,11 +104,11 @@ impl SlashCommand for FetchSlashCommand { } fn description(&self) -> String { - "insert URL contents".into() + "Insert fetched URL contents".into() } fn menu_text(&self) -> String { - "Insert fetched URL contents".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index 260c6b0e2a0840..6da56d064178ad 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -110,11 +110,11 @@ impl SlashCommand for FileSlashCommand { } fn description(&self) -> String { - "insert file".into() + "Insert file".into() } fn menu_text(&self) -> String { - "Insert File".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/now_command.rs b/crates/assistant/src/slash_command/now_command.rs index eb0ca926f015b6..221ba05cafc623 
100644 --- a/crates/assistant/src/slash_command/now_command.rs +++ b/crates/assistant/src/slash_command/now_command.rs @@ -19,11 +19,11 @@ impl SlashCommand for NowSlashCommand { } fn description(&self) -> String { - "insert the current date and time".into() + "Insert current date and time".into() } fn menu_text(&self) -> String { - "Insert Current Date and Time".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 197e91d91addac..58fef8f338771d 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -47,11 +47,11 @@ impl SlashCommand for ProjectSlashCommand { } fn description(&self) -> String { - "Generate semantic searches based on the current context".into() + "Generate a semantic search based on context".into() } fn menu_text(&self) -> String { - "Project Context".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/prompt_command.rs b/crates/assistant/src/slash_command/prompt_command.rs index effbcc0f90ce87..978c6d7504caeb 100644 --- a/crates/assistant/src/slash_command/prompt_command.rs +++ b/crates/assistant/src/slash_command/prompt_command.rs @@ -16,11 +16,11 @@ impl SlashCommand for PromptSlashCommand { } fn description(&self) -> String { - "insert prompt from library".into() + "Insert prompt from library".into() } fn menu_text(&self) -> String { - "Insert Prompt from Library".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index f0f3ee3d25c660..c7183e95bbc853 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -34,11 +34,11 @@ impl SlashCommand for SearchSlashCommand 
{ } fn description(&self) -> String { - "semantic search".into() + "Search your project semantically".into() } fn menu_text(&self) -> String { - "Semantic Search".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/symbols_command.rs b/crates/assistant/src/slash_command/symbols_command.rs index 1cf8536c0dbfe7..887b57ba9956c7 100644 --- a/crates/assistant/src/slash_command/symbols_command.rs +++ b/crates/assistant/src/slash_command/symbols_command.rs @@ -17,11 +17,11 @@ impl SlashCommand for OutlineSlashCommand { } fn description(&self) -> String { - "insert symbols for active tab".into() + "Insert symbols for active tab".into() } fn menu_text(&self) -> String { - "Insert Symbols for Active Tab".into() + self.description() } fn complete_argument( diff --git a/crates/assistant/src/slash_command/tab_command.rs b/crates/assistant/src/slash_command/tab_command.rs index bdf8450d43be81..0bff4730d8e5c8 100644 --- a/crates/assistant/src/slash_command/tab_command.rs +++ b/crates/assistant/src/slash_command/tab_command.rs @@ -24,11 +24,11 @@ impl SlashCommand for TabSlashCommand { } fn description(&self) -> String { - "insert open tabs (active tab by default)".to_owned() + "Insert open tabs (active tab by default)".to_owned() } fn menu_text(&self) -> String { - "Insert Open Tabs".to_owned() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/terminal_command.rs b/crates/assistant/src/slash_command/terminal_command.rs index 1d0293c235d442..1d4959fb199572 100644 --- a/crates/assistant/src/slash_command/terminal_command.rs +++ b/crates/assistant/src/slash_command/terminal_command.rs @@ -29,11 +29,11 @@ impl SlashCommand for TerminalSlashCommand { } fn description(&self) -> String { - "insert terminal output".into() + "Insert terminal output".into() } fn menu_text(&self) -> String { - "Insert Terminal Output".into() + self.description() } fn 
requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command/workflow_command.rs b/crates/assistant/src/slash_command/workflow_command.rs index c66dd9bebff144..071b4feaf436e3 100644 --- a/crates/assistant/src/slash_command/workflow_command.rs +++ b/crates/assistant/src/slash_command/workflow_command.rs @@ -29,11 +29,11 @@ impl SlashCommand for WorkflowSlashCommand { } fn description(&self) -> String { - "insert a prompt that opts into the edit workflow".into() + "Insert prompt to opt into the edit workflow".into() } fn menu_text(&self) -> String { - "Insert Workflow Prompt".into() + self.description() } fn requires_argument(&self) -> bool { diff --git a/crates/assistant/src/slash_command_picker.rs b/crates/assistant/src/slash_command_picker.rs index 4b57dcfb3306c5..58023848b0e508 100644 --- a/crates/assistant/src/slash_command_picker.rs +++ b/crates/assistant/src/slash_command_picker.rs @@ -184,7 +184,7 @@ impl PickerDelegate for SlashCommandDelegate { h_flex() .group(format!("command-entry-label-{ix}")) .w_full() - .min_w(px(220.)) + .min_w(px(250.)) .child( v_flex() .child( @@ -203,7 +203,9 @@ impl PickerDelegate for SlashCommandDelegate { div() .font_buffer(cx) .child( - Label::new(args).size(LabelSize::Small), + Label::new(args) + .size(LabelSize::Small) + .color(Color::Muted), ) .visible_on_hover(format!( "command-entry-label-{ix}" From 21336eb12491e3b505437c8d3fa99e7aaf64460f Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 10:10:53 -0400 Subject: [PATCH 438/762] docs: Add note about forking the extensions repo to a personal GitHub account (#18631) This PR adds a note to the docs encouraging folks to fork the `zed-industries/extensions` repo to a personal GitHub account rather than a GitHub organization, as this makes life easier for everyone. 
Release Notes: - N/A --- docs/src/extensions/developing-extensions.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index c1330a4c49eef4..503d253fc33bb3 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -86,6 +86,8 @@ If you already have a published extension with the same name installed, your dev To publish an extension, open a PR to [the `zed-industries/extensions` repo](https://github.com/zed-industries/extensions). +> Note: It is very helpful if you fork the `zed-industries/extensions` repo to a personal GitHub account instead of a GitHub organization, as this allows Zed staff to push any needed changes to your PR to expedite the publishing process. + In your PR, do the following: 1. Add your extension as a Git submodule within the `extensions/` directory From cfd61f933773ff152d3520cba84f6ac69453c7bf Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 10:38:23 -0400 Subject: [PATCH 439/762] Clean up formatting in `Cargo.toml` (#18632) This PR cleans up some formatting in some `Cargo.toml` files. 
Release Notes: - N/A --- crates/languages/Cargo.toml | 35 +++++++++++++++++------------------ crates/util/Cargo.toml | 8 ++++---- crates/worktree/Cargo.toml | 8 ++++---- crates/zed/Cargo.toml | 2 +- 4 files changed, 26 insertions(+), 27 deletions(-) diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 5cb5455dd168d6..19842efac20cf7 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -46,6 +46,7 @@ lsp.workspace = true node_runtime.workspace = true paths.workspace = true project.workspace = true +protols-tree-sitter-proto = { workspace = true, optional = true } regex.workspace = true rope.workspace = true rust-embed.workspace = true @@ -55,26 +56,24 @@ settings.workspace = true smol.workspace = true task.workspace = true toml.workspace = true +tree-sitter = { workspace = true, optional = true } +tree-sitter-bash = { workspace = true, optional = true } +tree-sitter-c = { workspace = true, optional = true } +tree-sitter-cpp = { workspace = true, optional = true } +tree-sitter-css = { workspace = true, optional = true } +tree-sitter-go = { workspace = true, optional = true } +tree-sitter-go-mod = { workspace = true, optional = true } +tree-sitter-gowork = { workspace = true, optional = true } +tree-sitter-jsdoc = { workspace = true, optional = true } +tree-sitter-json = { workspace = true, optional = true } +tree-sitter-md = { workspace = true, optional = true } +tree-sitter-python = { workspace = true, optional = true } +tree-sitter-regex = { workspace = true, optional = true } +tree-sitter-rust = { workspace = true, optional = true } +tree-sitter-typescript = { workspace = true, optional = true } +tree-sitter-yaml = { workspace = true, optional = true } util.workspace = true -tree-sitter-bash = {workspace = true, optional = true} -tree-sitter-c = {workspace = true, optional = true} -tree-sitter-cpp = {workspace = true, optional = true} -tree-sitter-css = {workspace = true, optional = true} -tree-sitter-go = {workspace 
= true, optional = true} -tree-sitter-go-mod = {workspace = true, optional = true} -tree-sitter-gowork = {workspace = true, optional = true} -tree-sitter-jsdoc = {workspace = true, optional = true} -tree-sitter-json = {workspace = true, optional = true} -tree-sitter-md = {workspace = true, optional = true} -protols-tree-sitter-proto = {workspace = true, optional = true} -tree-sitter-python = {workspace = true, optional = true} -tree-sitter-regex = {workspace = true, optional = true} -tree-sitter-rust = {workspace = true, optional = true} -tree-sitter-typescript = {workspace = true, optional = true} -tree-sitter-yaml = {workspace = true, optional = true} -tree-sitter = {workspace = true, optional = true} - [dev-dependencies] text.workspace = true theme = { workspace = true, features = ["test-support"] } diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 6257ffd64269f4..58c4686bf90522 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -17,19 +17,19 @@ test-support = ["tempfile", "git2", "rand"] [dependencies] anyhow.workspace = true +async-fs.workspace = true collections.workspace = true dirs.workspace = true +futures-lite.workspace = true futures.workspace = true git2 = { workspace = true, optional = true } globset.workspace = true log.workspace = true -rand = {workspace = true, optional = true} +rand = { workspace = true, optional = true } regex.workspace = true rust-embed.workspace = true serde.workspace = true serde_json.workspace = true -async-fs.workspace = true -futures-lite.workspace = true take-until = "0.2.0" tempfile = { workspace = true, optional = true } unicase.workspace = true @@ -39,5 +39,5 @@ tendril = "0.4.3" [dev-dependencies] git2.workspace = true -tempfile.workspace = true rand.workspace = true +tempfile.workspace = true diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 1186c988adb17c..41221d7b6e8186 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -48,12 +48,12 
@@ text.workspace = true util.workspace = true [dev-dependencies] -clock = {workspace = true, features = ["test-support"]} +clock = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } env_logger.workspace = true git2.workspace = true -gpui = {workspace = true, features = ["test-support"]} +gpui = { workspace = true, features = ["test-support"] } http_client.workspace = true -rand.workspace = true -settings = {workspace = true, features = ["test-support"]} pretty_assertions.workspace = true +rand.workspace = true +settings = { workspace = true, features = ["test-support"] } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 775a59e475cb47..04e2be7ed02666 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -63,7 +63,7 @@ language.workspace = true language_model.workspace = true language_selector.workspace = true language_tools.workspace = true -languages = {workspace = true, features = ["load-grammars"] } +languages = { workspace = true, features = ["load-grammars"] } libc.workspace = true log.workspace = true markdown_preview.workspace = true From 028d7a624f24a143858b316bee7edc69911918ee Mon Sep 17 00:00:00 2001 From: Joseph T Lyons Date: Wed, 2 Oct 2024 11:03:57 -0400 Subject: [PATCH 440/762] v0.157.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7c92ef0f5257c7..b69e4541cc9e20 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14398,7 +14398,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.156.0" +version = "0.157.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 04e2be7ed02666..e22f75f5bb3e54 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition = "2021" name = "zed" -version = "0.156.0" +version = "0.157.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From 2cd12f84def5bf4a05ca4e19138d33e4e1d6073c Mon Sep 17 00:00:00 2001 From: Victor Roetman Date: Wed, 2 Oct 2024 12:18:41 -0400 Subject: [PATCH 441/762] docs: Add FIPS mode error to Linux troubleshooting (#18407) - Closes: #18335 Update linux.md with a workaround for the ``` crypto/fips/fips.c:154: OpenSSL internal error: FATAL FIPS SELFTEST FAILURE ``` error when using bundled libssl and libcrypto. Co-authored-by: Peter Tripp --- docs/src/linux.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/src/linux.md b/docs/src/linux.md index 33d12d0a8ca02f..4abd7de8ba8ebd 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -144,3 +144,20 @@ If you are seeing "too many open files" then first try `sysctl fs.inotify`. - You should see that `max_user_watches` is 8000 or higher (you can change the limit with `sudo sysctl fs.inotify.max_user_watches=64000`). Zed needs one watch per directory in all your open projects + one per git repository + a handful more for settings, themes, keymaps, extensions. It is also possible that you are running out of file descriptors. You can check the limits with `ulimit` and update them by editing `/etc/security/limits.conf`. + +### FIPS Mode OpenSSL internal error {#fips} + +If your machine is running in FIPS mode (`cat /proc/sys/crypto/fips_enabled` is set to `1`) Zed may fail to start and output the following when launched with `zed --foreground`: + +``` +crypto/fips/fips.c:154: OpenSSL internal error: FATAL FIPS SELFTEST FAILURE +``` + +As a workaround, remove the bundled `libssl` and `libcrypto` libraries from the `zed.app/lib` directory: + +``` +rm ~/.local/zed.app/lib/libssl.so.1.1 +rm ~/.local/zed.app/lib/libcrypto.so.1.1 +``` + +This will force zed to fallback to the system `libssl` and `libcrypto` libraries. 
From 167af4bc1d3acf3bc2b7d624983ce52602d9bc08 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 12:33:13 -0400 Subject: [PATCH 442/762] Use `const` over `static` for string literals (#18635) I noticed a few places where we were storing `&'static str`s in `static`s instead of `const`s. This PR updates them to use `const`. Release Notes: - N/A --- crates/client/src/client.rs | 2 +- crates/collab/src/api/events.rs | 2 +- crates/editor/src/clangd_ext.rs | 2 +- crates/editor/src/rust_analyzer_ext.rs | 2 +- crates/gpui/src/taffy.rs | 2 +- crates/task/src/vscode_format.rs | 4 ++-- crates/worktree/src/worktree.rs | 2 +- crates/zed/src/reliability.rs | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 48bd646d8aa70e..d565d620c3c206 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1752,7 +1752,7 @@ impl CredentialsProvider for KeychainCredentialsProvider { } /// prefix for the zed:// url scheme -pub static ZED_URL_SCHEME: &str = "zed"; +pub const ZED_URL_SCHEME: &str = "zed"; /// Parses the given link into a Zed link. 
/// diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 377741f434c2f8..bbfa69c0b8f70b 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -23,7 +23,7 @@ use telemetry_events::{ }; use uuid::Uuid; -static CRASH_REPORTS_BUCKET: &str = "zed-crash-reports"; +const CRASH_REPORTS_BUCKET: &str = "zed-crash-reports"; pub fn router() -> Router { Router::new() diff --git a/crates/editor/src/clangd_ext.rs b/crates/editor/src/clangd_ext.rs index 2f0f7aaee47e7a..501f81b1073df2 100644 --- a/crates/editor/src/clangd_ext.rs +++ b/crates/editor/src/clangd_ext.rs @@ -9,7 +9,7 @@ use crate::lsp_ext::find_specific_language_server_in_selection; use crate::{element::register_action, Editor, SwitchSourceHeader}; -static CLANGD_SERVER_NAME: &str = "clangd"; +const CLANGD_SERVER_NAME: &str = "clangd"; fn is_c_language(language: &Language) -> bool { return language.name() == "C++".into() || language.name() == "C".into(); diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index db17eaab282fae..fa39e5c9d49ce9 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -10,7 +10,7 @@ use crate::{ ExpandMacroRecursively, }; -static RUST_ANALYZER_NAME: &str = "rust-analyzer"; +const RUST_ANALYZER_NAME: &str = "rust-analyzer"; fn is_rust_language(language: &Language) -> bool { language.name() == "Rust".into() diff --git a/crates/gpui/src/taffy.rs b/crates/gpui/src/taffy.rs index a6a61031c9ce13..a80c734b81bbfc 100644 --- a/crates/gpui/src/taffy.rs +++ b/crates/gpui/src/taffy.rs @@ -24,7 +24,7 @@ pub struct TaffyLayoutEngine { nodes_to_measure: FxHashMap, } -static EXPECT_MESSAGE: &str = "we should avoid taffy layout errors by construction if possible"; +const EXPECT_MESSAGE: &str = "we should avoid taffy layout errors by construction if possible"; impl TaffyLayoutEngine { pub fn new() -> Self { diff --git a/crates/task/src/vscode_format.rs 
b/crates/task/src/vscode_format.rs index 74be56b5b17695..c150ee807fb258 100644 --- a/crates/task/src/vscode_format.rs +++ b/crates/task/src/vscode_format.rs @@ -200,7 +200,7 @@ mod tests { #[test] fn can_deserialize_ts_tasks() { - static TYPESCRIPT_TASKS: &str = include_str!("../test_data/typescript.json"); + const TYPESCRIPT_TASKS: &str = include_str!("../test_data/typescript.json"); let vscode_definitions: VsCodeTaskFile = serde_json_lenient::from_str(TYPESCRIPT_TASKS).unwrap(); @@ -290,7 +290,7 @@ mod tests { #[test] fn can_deserialize_rust_analyzer_tasks() { - static RUST_ANALYZER_TASKS: &str = include_str!("../test_data/rust-analyzer.json"); + const RUST_ANALYZER_TASKS: &str = include_str!("../test_data/rust-analyzer.json"); let vscode_definitions: VsCodeTaskFile = serde_json_lenient::from_str(RUST_ANALYZER_TASKS).unwrap(); let expected = vec![ diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index a3daf8ea2eade7..40cd465d9ba7d5 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -351,7 +351,7 @@ pub enum Event { DeletedEntry(ProjectEntryId), } -static EMPTY_PATH: &str = ""; +const EMPTY_PATH: &str = ""; impl EventEmitter for Worktree {} diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 50e5a05b823ed0..b40bbc78bdb479 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -441,7 +441,7 @@ async fn upload_previous_panics( Ok::<_, anyhow::Error>(most_recent_panic) } -static LAST_CRASH_UPLOADED: &str = "LAST_CRASH_UPLOADED"; +const LAST_CRASH_UPLOADED: &str = "LAST_CRASH_UPLOADED"; /// upload crashes from apple's diagnostic reports to our server. 
/// (only if telemetry is enabled) From 845991c0e59a34e2d98300237956ede553c44289 Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Thu, 3 Oct 2024 01:35:35 +0900 Subject: [PATCH 443/762] docs: Add missing UI font settings to "Configuring Zed" (#18267) - Add missing `ui_font` options in `configuring-zed.md` Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- docs/src/configuring-zed.md | 68 +++++++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index fbd5fa53cfbd84..230255597e35cf 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2180,6 +2180,64 @@ Float values between `0.0` and `0.9`, where: } ``` +## UI Font Family + +- Description: The name of the font to use for text in the UI. +- Setting: `ui_font_family` +- Default: `Zed Plex Sans` + +**Options** + +The name of any font family installed on the system. + +## UI Font Features + +- Description: The OpenType features to enable for text in the UI. +- Setting: `ui_font_features` +- Default: `null` +- Platform: macOS and Windows. + +**Options** + +Zed supports all OpenType features that can be enabled or disabled for a given UI font, as well as setting values for font features. + +For example, to disable font ligatures, add the following to your settings: + +```json +{ + "ui_font_features": { + "calt": false + } +} +``` + +You can also set other OpenType features, like setting `cv01` to `7`: + +```json +{ + "ui_font_features": { + "cv01": 7 + } +} +``` + +## UI Font Fallbacks + +- Description: The font fallbacks to use for text in the UI. +- Setting: `ui_font_fallbacks` +- Default: `null` +- Platform: macOS and Windows. + +**Options** + +For example, to use `Nerd Font` as a fallback, add the following to your settings: + +```json +{ + "ui_font_fallbacks": ["Nerd Font"] +} +``` + ## UI Font Size - Description: The default font size for text in the UI. 
@@ -2190,6 +2248,16 @@ Float values between `0.0` and `0.9`, where: `integer` values from `6` to `100` pixels (inclusive) +## UI Font Weight + +- Description: The default font weight for text in the UI. +- Setting: `ui_font_weight` +- Default: `400` + +**Options** + +`integer` values between `100` and `900` + ## An example configuration: ```json From 5aaaed52fc46fdc3029133fac4f96a7652681ea9 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 2 Oct 2024 18:57:03 +0200 Subject: [PATCH 444/762] Adjust spacing and sizing of buffer search bar icon buttons (#18638) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR mostly makes all of the search bar icon buttons all squared and adjusts the spacing between them, as well as the additional input that appears when you toggle the "Replace all" action. Screenshot 2024-10-02 at 6 08 30 PM --- Release Notes: - N/A --- crates/search/src/buffer_search.rs | 133 +++++++++++++++------------- crates/search/src/project_search.rs | 2 +- crates/search/src/search.rs | 3 +- crates/search/src/search_bar.rs | 3 +- 4 files changed, 76 insertions(+), 65 deletions(-) diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 42b267c3c9563a..5846a6efc51dd2 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -27,7 +27,7 @@ use settings::Settings; use std::sync::Arc; use theme::ThemeSettings; -use ui::{h_flex, prelude::*, IconButton, IconName, Tooltip, BASE_REM_SIZE_IN_PX}; +use ui::{h_flex, prelude::*, IconButton, IconButtonShape, IconName, Tooltip, BASE_REM_SIZE_IN_PX}; use util::ResultExt; use workspace::{ item::ItemHandle, @@ -200,7 +200,7 @@ impl Render for BufferSearchBar { }; let search_line = h_flex() - .mb_1() + .gap_2() .child( h_flex() .id("editor-scroll") @@ -208,7 +208,6 @@ impl Render for BufferSearchBar { .flex_1() .h_8() .px_2() - .mr_2() .py_1() .border_1() 
.border_color(editor_border) @@ -244,66 +243,70 @@ impl Render for BufferSearchBar { })) }), ) - .when(supported_options.replacement, |this| { - this.child( - IconButton::new("buffer-search-bar-toggle-replace-button", IconName::Replace) - .style(ButtonStyle::Subtle) - .when(self.replace_enabled, |button| { - button.style(ButtonStyle::Filled) - }) - .on_click(cx.listener(|this, _: &ClickEvent, cx| { - this.toggle_replace(&ToggleReplace, cx); - })) - .selected(self.replace_enabled) - .size(ButtonSize::Compact) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |cx| { - Tooltip::for_action_in( - "Toggle replace", - &ToggleReplace, - &focus_handle, - cx, - ) - } - }), - ) - }) - .when(supported_options.selection, |this| { - this.child( - IconButton::new( - "buffer-search-bar-toggle-search-selection-button", - IconName::SearchSelection, - ) - .style(ButtonStyle::Subtle) - .when(self.selection_search_enabled, |button| { - button.style(ButtonStyle::Filled) - }) - .on_click(cx.listener(|this, _: &ClickEvent, cx| { - this.toggle_selection(&ToggleSelection, cx); - })) - .selected(self.selection_search_enabled) - .size(ButtonSize::Compact) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |cx| { - Tooltip::for_action_in( - "Toggle Search Selection", - &ToggleSelection, - &focus_handle, - cx, - ) - } - }), - ) - }) .child( h_flex() .flex_none() + .gap_0p5() + .when(supported_options.replacement, |this| { + this.child( + IconButton::new( + "buffer-search-bar-toggle-replace-button", + IconName::Replace, + ) + .style(ButtonStyle::Subtle) + .shape(IconButtonShape::Square) + .when(self.replace_enabled, |button| { + button.style(ButtonStyle::Filled) + }) + .on_click(cx.listener(|this, _: &ClickEvent, cx| { + this.toggle_replace(&ToggleReplace, cx); + })) + .selected(self.replace_enabled) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle Replace", + &ToggleReplace, + &focus_handle, + cx, + ) + } + }), + 
) + }) + .when(supported_options.selection, |this| { + this.child( + IconButton::new( + "buffer-search-bar-toggle-search-selection-button", + IconName::SearchSelection, + ) + .style(ButtonStyle::Subtle) + .shape(IconButtonShape::Square) + .when(self.selection_search_enabled, |button| { + button.style(ButtonStyle::Filled) + }) + .on_click(cx.listener(|this, _: &ClickEvent, cx| { + this.toggle_selection(&ToggleSelection, cx); + })) + .selected(self.selection_search_enabled) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |cx| { + Tooltip::for_action_in( + "Toggle Search Selection", + &ToggleSelection, + &focus_handle, + cx, + ) + } + }), + ) + }) .child( IconButton::new("select-all", ui::IconName::SelectAll) .on_click(|_, cx| cx.dispatch_action(SelectAllMatches.boxed_clone())) - .size(ButtonSize::Compact) + .shape(IconButtonShape::Square) .tooltip({ let focus_handle = focus_handle.clone(); move |cx| { @@ -332,11 +335,13 @@ impl Render for BufferSearchBar { )) .when(!narrow_mode, |this| { this.child(h_flex().ml_2().min_w(rems_from_px(40.)).child( - Label::new(match_text).color(if self.active_match_index.is_some() { - Color::Default - } else { - Color::Disabled - }), + Label::new(match_text).size(LabelSize::Small).color( + if self.active_match_index.is_some() { + Color::Default + } else { + Color::Disabled + }, + ), )) }), ); @@ -367,8 +372,10 @@ impl Render for BufferSearchBar { .child( h_flex() .flex_none() + .gap_0p5() .child( IconButton::new("search-replace-next", ui::IconName::ReplaceNext) + .shape(IconButtonShape::Square) .tooltip({ let focus_handle = focus_handle.clone(); move |cx| { @@ -386,6 +393,7 @@ impl Render for BufferSearchBar { ) .child( IconButton::new("search-replace-all", ui::IconName::ReplaceAll) + .shape(IconButtonShape::Square) .tooltip({ let focus_handle = focus_handle.clone(); move |cx| { @@ -441,6 +449,7 @@ impl Render for BufferSearchBar { .when(!narrow_mode, |div| { div.child( IconButton::new(SharedString::from("Close"), 
IconName::Close) + .shape(IconButtonShape::Square) .tooltip(move |cx| { Tooltip::for_action("Close Search Bar", &Dismiss, cx) }) diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 12e6ccc12dc496..693d4b265867fb 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -1634,7 +1634,7 @@ impl Render for ProjectSearchBar { let focus_handle = focus_handle.clone(); move |cx| { Tooltip::for_action_in( - "Toggle replace", + "Toggle Replace", &ToggleReplace, &focus_handle, cx, diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index 0ceb8e710b5f41..60ff80834feed4 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -5,7 +5,7 @@ use gpui::{actions, Action, AppContext, FocusHandle, IntoElement}; use project::search::SearchQuery; pub use project_search::ProjectSearchView; use ui::{prelude::*, Tooltip}; -use ui::{ButtonStyle, IconButton}; +use ui::{ButtonStyle, IconButton, IconButtonShape}; use workspace::notifications::NotificationId; use workspace::{Toast, Workspace}; @@ -112,6 +112,7 @@ impl SearchOptions { IconButton::new(self.label(), self.icon()) .on_click(action) .style(ButtonStyle::Subtle) + .shape(IconButtonShape::Square) .selected(active) .tooltip({ let action = self.to_toggle_action(); diff --git a/crates/search/src/search_bar.rs b/crates/search/src/search_bar.rs index 102f04c4b95c28..080679663d0ba4 100644 --- a/crates/search/src/search_bar.rs +++ b/crates/search/src/search_bar.rs @@ -1,6 +1,6 @@ use gpui::{Action, FocusHandle, IntoElement}; -use ui::IconButton; use ui::{prelude::*, Tooltip}; +use ui::{IconButton, IconButtonShape}; pub(super) fn render_nav_button( icon: ui::IconName, @@ -13,6 +13,7 @@ pub(super) fn render_nav_button( SharedString::from(format!("search-nav-button-{}", action.name())), icon, ) + .shape(IconButtonShape::Square) .on_click(|_, cx| cx.dispatch_action(action.boxed_clone())) .tooltip(move |cx| 
Tooltip::for_action_in(tooltip, action, &focus_handle, cx)) .disabled(!active) From a5f50e5c1e7fc982fad3bc700e55aee3243791f1 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 2 Oct 2024 18:57:20 +0200 Subject: [PATCH 445/762] Tweak warning diagnostic toggle (#18637) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds color to the warning diagnostic toggle, so that, if it's turned on, the warning icon is yellow. And, in the opposite case, it's muted. | Turned on | Turned off | |--------|--------| | Screenshot 2024-10-02 at 6 08 30 PM | Screenshot 2024-10-02 at 6 08 36 PM | --- Release Notes: - N/A --- crates/diagnostics/src/toolbar_controls.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/crates/diagnostics/src/toolbar_controls.rs b/crates/diagnostics/src/toolbar_controls.rs index b546db50a064ba..0d3000814262ad 100644 --- a/crates/diagnostics/src/toolbar_controls.rs +++ b/crates/diagnostics/src/toolbar_controls.rs @@ -1,7 +1,7 @@ use crate::ProjectDiagnosticsEditor; use gpui::{EventEmitter, ParentElement, Render, View, ViewContext, WeakView}; use ui::prelude::*; -use ui::{IconButton, IconName, Tooltip}; +use ui::{IconButton, IconButtonShape, IconName, Tooltip}; use workspace::{item::ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView}; pub struct ToolbarControls { @@ -33,11 +33,19 @@ impl Render for ToolbarControls { "Include Warnings" }; + let warning_color = if include_warnings { + Color::Warning + } else { + Color::Muted + }; + h_flex() + .gap_1() .when(has_stale_excerpts, |div| { div.child( IconButton::new("update-excerpts", IconName::Update) .icon_color(Color::Info) + .shape(IconButtonShape::Square) .disabled(is_updating) .tooltip(move |cx| Tooltip::text("Update excerpts", cx)) .on_click(cx.listener(|this, _, cx| { @@ -51,6 +59,8 @@ impl Render for ToolbarControls { }) .child( IconButton::new("toggle-warnings", 
IconName::Warning) + .icon_color(warning_color) + .shape(IconButtonShape::Square) .tooltip(move |cx| Tooltip::text(tooltip, cx)) .on_click(cx.listener(|this, _, cx| { if let Some(editor) = this.editor() { From 209ebb0c65bc0ba56e4e0bad1a7b7e475414082b Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 2 Oct 2024 10:44:16 -0700 Subject: [PATCH 446/762] Revert "Fix blurry cursor on Wayland at a scale other than 100%" (#18642) Closes #17771 Reverts zed-industries/zed#17496 This PR turns out to need more work than I thought when I merged it. Release Notes: - Linux: Fix a bug where the cursor would be the wrong size on Wayland --- .../gpui/src/platform/linux/wayland/client.rs | 3 +- .../gpui/src/platform/linux/wayland/cursor.rs | 33 ++++--------------- 2 files changed, 8 insertions(+), 28 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index f0015a7e5820bd..ab87bb20242ed8 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -477,8 +477,7 @@ impl WaylandClient { .as_ref() .map(|primary_selection_manager| primary_selection_manager.get_device(&seat, &qh, ())); - // FIXME: Determine the scaling factor dynamically by the compositor - let mut cursor = Cursor::new(&conn, &globals, 24, 2); + let mut cursor = Cursor::new(&conn, &globals, 24); handle .insert_source(XDPEventSource::new(&common.background_executor), { diff --git a/crates/gpui/src/platform/linux/wayland/cursor.rs b/crates/gpui/src/platform/linux/wayland/cursor.rs index ea29eee73c71f4..6a527650429a4e 100644 --- a/crates/gpui/src/platform/linux/wayland/cursor.rs +++ b/crates/gpui/src/platform/linux/wayland/cursor.rs @@ -11,7 +11,6 @@ pub(crate) struct Cursor { theme_name: Option, surface: WlSurface, size: u32, - scale: u32, shm: WlShm, connection: Connection, } @@ -24,7 +23,7 @@ impl Drop for Cursor { } impl Cursor { - pub fn new(connection: &Connection, globals: &Globals, 
size: u32, scale: u32) -> Self { + pub fn new(connection: &Connection, globals: &Globals, size: u32) -> Self { Self { theme: CursorTheme::load(&connection, globals.shm.clone(), size).log_err(), theme_name: None, @@ -32,7 +31,6 @@ impl Cursor { shm: globals.shm.clone(), connection: connection.clone(), size, - scale, } } @@ -40,18 +38,14 @@ impl Cursor { if let Some(size) = size { self.size = size; } - if let Some(theme) = CursorTheme::load_from_name( - &self.connection, - self.shm.clone(), - theme_name, - self.size * self.scale, - ) - .log_err() + if let Some(theme) = + CursorTheme::load_from_name(&self.connection, self.shm.clone(), theme_name, self.size) + .log_err() { self.theme = Some(theme); self.theme_name = Some(theme_name.to_string()); } else if let Some(theme) = - CursorTheme::load(&self.connection, self.shm.clone(), self.size * self.scale).log_err() + CursorTheme::load(&self.connection, self.shm.clone(), self.size).log_err() { self.theme = Some(theme); self.theme_name = None; @@ -97,22 +91,9 @@ impl Cursor { let (width, height) = buffer.dimensions(); let (hot_x, hot_y) = buffer.hotspot(); - let scaled_width = width / self.scale; - let scaled_height = height / self.scale; - let scaled_hot_x = hot_x / self.scale; - let scaled_hot_y = hot_y / self.scale; - - self.surface.set_buffer_scale(self.scale as i32); - - wl_pointer.set_cursor( - serial_id, - Some(&self.surface), - scaled_hot_x as i32, - scaled_hot_y as i32, - ); + wl_pointer.set_cursor(serial_id, Some(&self.surface), hot_x as i32, hot_y as i32); self.surface.attach(Some(&buffer), 0, 0); - self.surface - .damage(0, 0, scaled_width as i32, scaled_height as i32); + self.surface.damage(0, 0, width as i32, height as i32); self.surface.commit(); } } else { From 0e8276560f2e9de2dd1783ef8d9e208d01dada44 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 14:10:19 -0400 Subject: [PATCH 447/762] language: Update buffer doc comments (#18646) This PR updates the doc comments in `buffer.rs` to use 
the standard style for linking to other items. Release Notes: - N/A --- crates/language/src/buffer.rs | 45 ++++++++++++++++++----------------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 8afc4d389db7f5..20ecd9594b9d4a 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -73,7 +73,7 @@ pub use lsp::DiagnosticSeverity; /// a diff against the contents of its file. pub static BUFFER_DIFF_TASK: LazyLock = LazyLock::new(TaskLabel::new); -/// Indicate whether a [Buffer] has permissions to edit. +/// Indicate whether a [`Buffer`] has permissions to edit. #[derive(PartialEq, Clone, Copy, Debug)] pub enum Capability { /// The buffer is a mutable replica. @@ -211,7 +211,7 @@ pub struct Diagnostic { /// /// When a language server produces a diagnostic with /// one or more associated diagnostics, those diagnostics are all - /// assigned a single group id. + /// assigned a single group ID. pub group_id: usize, /// Whether this diagnostic is the primary diagnostic for its group. /// @@ -718,7 +718,7 @@ impl Buffer { self } - /// Returns the [Capability] of this buffer. + /// Returns the [`Capability`] of this buffer. pub fn capability(&self) -> Capability { self.capability } @@ -728,7 +728,7 @@ impl Buffer { self.capability == Capability::ReadOnly } - /// Builds a [Buffer] with the given underlying [TextBuffer], diff base, [File] and [Capability]. + /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`]. pub fn build( buffer: TextBuffer, diff_base: Option, @@ -941,7 +941,7 @@ impl Buffer { self.syntax_map.lock().language_registry() } - /// Assign the buffer a new [Capability]. + /// Assign the buffer a new [`Capability`]. 
pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext) { self.capability = capability; cx.emit(BufferEvent::CapabilityChanged) @@ -1032,7 +1032,7 @@ impl Buffer { cx.notify(); } - /// Updates the [File] backing this buffer. This should be called when + /// Updates the [`File`] backing this buffer. This should be called when /// the file has changed or has been deleted. pub fn file_updated(&mut self, new_file: Arc, cx: &mut ModelContext) { let mut file_changed = false; @@ -1071,7 +1071,7 @@ impl Buffer { } } - /// Returns the current diff base, see [Buffer::set_diff_base]. + /// Returns the current diff base, see [`Buffer::set_diff_base`]. pub fn diff_base(&self) -> Option<&Rope> { match self.diff_base.as_ref()? { BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => { @@ -1142,12 +1142,12 @@ impl Buffer { })) } - /// Returns the primary [Language] assigned to this [Buffer]. + /// Returns the primary [`Language`] assigned to this [`Buffer`]. pub fn language(&self) -> Option<&Arc> { self.language.as_ref() } - /// Returns the [Language] at the given location. + /// Returns the [`Language`] at the given location. pub fn language_at(&self, position: D) -> Option> { let offset = position.to_offset(self); self.syntax_map @@ -2730,6 +2730,7 @@ impl BufferSnapshot { .collect(); (captures, highlight_maps) } + /// Iterates over chunks of text in the given range of the buffer. Text is chunked /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also /// returned in chunks where each chunk has a single syntax highlighting style and @@ -2781,12 +2782,12 @@ impl BufferSnapshot { .last() } - /// Returns the main [Language] + /// Returns the main [`Language`]. pub fn language(&self) -> Option<&Arc> { self.language.as_ref() } - /// Returns the [Language] at the given location. + /// Returns the [`Language`] at the given location. 
pub fn language_at(&self, position: D) -> Option<&Arc> { self.syntax_layer_at(position) .map(|info| info.language) @@ -2806,7 +2807,7 @@ impl BufferSnapshot { CharClassifier::new(self.language_scope_at(point)) } - /// Returns the [LanguageScope] at the given location. + /// Returns the [`LanguageScope`] at the given location. pub fn language_scope_at(&self, position: D) -> Option { let offset = position.to_offset(self); let mut scope = None; @@ -2961,7 +2962,7 @@ impl BufferSnapshot { /// Returns the outline for the buffer. /// - /// This method allows passing an optional [SyntaxTheme] to + /// This method allows passing an optional [`SyntaxTheme`] to /// syntax-highlight the returned symbols. pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option> { self.outline_items_containing(0..self.len(), true, theme) @@ -2970,7 +2971,7 @@ impl BufferSnapshot { /// Returns all the symbols that contain the given position. /// - /// This method allows passing an optional [SyntaxTheme] to + /// This method allows passing an optional [`SyntaxTheme`] to /// syntax-highlight the returned symbols. pub fn symbols_containing( &self, @@ -3213,7 +3214,7 @@ impl BufferSnapshot { } /// For each grammar in the language, runs the provided - /// [tree_sitter::Query] against the given range. + /// [`tree_sitter::Query`] against the given range. pub fn matches( &self, range: Range, @@ -3774,7 +3775,7 @@ impl BufferSnapshot { }) } - /// Whether the buffer contains any git changes. + /// Whether the buffer contains any Git changes. pub fn has_git_diff(&self) -> bool { !self.git_diff.is_empty() } @@ -3856,7 +3857,7 @@ impl BufferSnapshot { } /// Returns all the diagnostic groups associated with the given - /// language server id. If no language server id is provided, + /// language server ID. If no language server ID is provided, /// all diagnostics groups are returned. 
pub fn diagnostic_groups( &self, @@ -4239,7 +4240,7 @@ impl Default for Diagnostic { } impl IndentSize { - /// Returns an [IndentSize] representing the given spaces. + /// Returns an [`IndentSize`] representing the given spaces. pub fn spaces(len: u32) -> Self { Self { len, @@ -4247,7 +4248,7 @@ impl IndentSize { } } - /// Returns an [IndentSize] representing a tab. + /// Returns an [`IndentSize`] representing a tab. pub fn tab() -> Self { Self { len: 1, @@ -4255,12 +4256,12 @@ impl IndentSize { } } - /// An iterator over the characters represented by this [IndentSize]. + /// An iterator over the characters represented by this [`IndentSize`]. pub fn chars(&self) -> impl Iterator { iter::repeat(self.char()).take(self.len as usize) } - /// The character representation of this [IndentSize]. + /// The character representation of this [`IndentSize`]. pub fn char(&self) -> char { match self.kind { IndentKind::Space => ' ', @@ -4268,7 +4269,7 @@ impl IndentSize { } } - /// Consumes the current [IndentSize] and returns a new one that has + /// Consumes the current [`IndentSize`] and returns a new one that has /// been shrunk or enlarged by the given size along the given direction. pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self { match direction { From 7c4615519befe8c35b25e22620d45b07b4b9c401 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 14:23:59 -0400 Subject: [PATCH 448/762] editor: Ensure proposed changes editor is syntax-highlighted when opened (#18648) This PR fixes an issue where the proposed changes editor would not have any syntax highlighting until a modification was made. When creating the branch buffer we reparse the buffer to rebuild the syntax map. 
Release Notes: - N/A --- crates/language/src/buffer.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 20ecd9594b9d4a..1f4c56ecc86ff6 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -819,6 +819,9 @@ impl Buffer { branch.set_language_registry(language_registry); } + // Reparse the branch buffer so that we get syntax highlighting immediately. + branch.reparse(cx); + branch }) } From 778dedec6c07bca9803c2d6b84ea7c3be7f6fe7e Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 2 Oct 2024 22:00:40 +0300 Subject: [PATCH 449/762] Prepare to sync other kinds of settings (#18616) This PR does not change how things work for settings, but lays the ground work for the future functionality. After this change, Zed is prepared to sync more than just `settings.json` files from local worktree and user config. * ssh tasks Part of this work is to streamline the task sync mechanism. Instead of having an extra set of requests to fetch the task contents from the server (as remote-via-collab does now and does not cover all sync cases), we want to reuse the existing mechanism for synchronizing user and local settings. * editorconfig Part of the task is to sync .editorconfig file changes to everyone which involves sending and storing those configs. Both ssh (and remove-over-collab) .zed/tasks.json and .editorconfig files behave similar to .zed/settings.json local files: they belong to a certain path in a certain worktree; may update over time, changing Zed's functionality; can be merged hierarchically. Settings sync follows the same "config file changed -> send to watchers -> parse and merge locally and on watchers" path that's needed for both new kinds of files, ergo the messaging layer is extended to send more types of settings for future watch & parse and merge impls to follow. 
Release Notes: - N/A --- .../20221109000000_test_schema.sql | 1 + ...20241002120231_add_local_settings_kind.sql | 1 + crates/collab/src/db.rs | 20 +++ crates/collab/src/db/queries/projects.rs | 9 ++ crates/collab/src/db/queries/rooms.rs | 1 + .../src/db/tables/worktree_settings_file.rs | 16 ++ crates/collab/src/rpc.rs | 2 + crates/collab/src/tests/integration_tests.rs | 44 +++++- crates/project/src/project_settings.rs | 46 +++++- crates/proto/proto/zed.proto | 13 ++ crates/settings/src/settings.rs | 3 +- crates/settings/src/settings_store.rs | 140 +++++++++++------- 12 files changed, 222 insertions(+), 74 deletions(-) create mode 100644 crates/collab/migrations/20241002120231_add_local_settings_kind.sql diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 5c2c3961600acd..5764aceea5fc4a 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -112,6 +112,7 @@ CREATE TABLE "worktree_settings_files" ( "worktree_id" INTEGER NOT NULL, "path" VARCHAR NOT NULL, "content" TEXT, + "kind" VARCHAR, PRIMARY KEY(project_id, worktree_id, path), FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE ); diff --git a/crates/collab/migrations/20241002120231_add_local_settings_kind.sql b/crates/collab/migrations/20241002120231_add_local_settings_kind.sql new file mode 100644 index 00000000000000..aec4ffb8f8519b --- /dev/null +++ b/crates/collab/migrations/20241002120231_add_local_settings_kind.sql @@ -0,0 +1 @@ +ALTER TABLE "worktree_settings_files" ADD COLUMN "kind" VARCHAR; diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 5c30a857389241..f717566824e7b9 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -35,6 +35,7 @@ use std::{ }; use time::PrimitiveDateTime; use tokio::sync::{Mutex, OwnedMutexGuard}; +use 
worktree_settings_file::LocalSettingsKind; #[cfg(test)] pub use tests::TestDb; @@ -766,6 +767,7 @@ pub struct Worktree { pub struct WorktreeSettingsFile { pub path: String, pub content: String, + pub kind: LocalSettingsKind, } pub struct NewExtensionVersion { @@ -783,3 +785,21 @@ pub struct ExtensionVersionConstraints { pub schema_versions: RangeInclusive, pub wasm_api_versions: RangeInclusive, } + +impl LocalSettingsKind { + pub fn from_proto(proto_kind: proto::LocalSettingsKind) -> Self { + match proto_kind { + proto::LocalSettingsKind::Settings => Self::Settings, + proto::LocalSettingsKind::Tasks => Self::Tasks, + proto::LocalSettingsKind::Editorconfig => Self::Editorconfig, + } + } + + pub fn to_proto(&self) -> proto::LocalSettingsKind { + match self { + Self::Settings => proto::LocalSettingsKind::Settings, + Self::Tasks => proto::LocalSettingsKind::Tasks, + Self::Editorconfig => proto::LocalSettingsKind::Editorconfig, + } + } +} diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 8091c6620570f2..ceac78203d9a1b 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -1,3 +1,4 @@ +use anyhow::Context as _; use util::ResultExt; use super::*; @@ -527,6 +528,12 @@ impl Database { connection: ConnectionId, ) -> Result>> { let project_id = ProjectId::from_proto(update.project_id); + let kind = match update.kind { + Some(kind) => proto::LocalSettingsKind::from_i32(kind) + .with_context(|| format!("unknown worktree settings kind: {kind}"))?, + None => proto::LocalSettingsKind::Settings, + }; + let kind = LocalSettingsKind::from_proto(kind); self.project_transaction(project_id, |tx| async move { // Ensure the update comes from the host. 
let project = project::Entity::find_by_id(project_id) @@ -543,6 +550,7 @@ impl Database { worktree_id: ActiveValue::Set(update.worktree_id as i64), path: ActiveValue::Set(update.path.clone()), content: ActiveValue::Set(content.clone()), + kind: ActiveValue::Set(kind), }) .on_conflict( OnConflict::columns([ @@ -800,6 +808,7 @@ impl Database { worktree.settings_files.push(WorktreeSettingsFile { path: db_settings_file.path, content: db_settings_file.content, + kind: db_settings_file.kind, }); } } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 635e2d232f087f..baba0f2cf9d7c2 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -735,6 +735,7 @@ impl Database { worktree.settings_files.push(WorktreeSettingsFile { path: db_settings_file.path, content: db_settings_file.content, + kind: db_settings_file.kind, }); } } diff --git a/crates/collab/src/db/tables/worktree_settings_file.rs b/crates/collab/src/db/tables/worktree_settings_file.rs index 92348c1ec94366..71f7b73fc1c399 100644 --- a/crates/collab/src/db/tables/worktree_settings_file.rs +++ b/crates/collab/src/db/tables/worktree_settings_file.rs @@ -11,9 +11,25 @@ pub struct Model { #[sea_orm(primary_key)] pub path: String, pub content: String, + pub kind: LocalSettingsKind, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation {} impl ActiveModelBehavior for ActiveModel {} + +#[derive( + Copy, Clone, Debug, PartialEq, Eq, EnumIter, DeriveActiveEnum, Default, Hash, serde::Serialize, +)] +#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")] +#[serde(rename_all = "snake_case")] +pub enum LocalSettingsKind { + #[default] + #[sea_orm(string_value = "settings")] + Settings, + #[sea_orm(string_value = "tasks")] + Tasks, + #[sea_orm(string_value = "editorconfig")] + Editorconfig, +} diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index d9683fb8b366c1..5f21df4ab9dbce 100644 --- 
a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1739,6 +1739,7 @@ fn notify_rejoined_projects( worktree_id: worktree.id, path: settings_file.path, content: Some(settings_file.content), + kind: Some(settings_file.kind.to_proto().into()), }, )?; } @@ -2220,6 +2221,7 @@ fn join_project_internal( worktree_id: worktree.id, path: settings_file.path, content: Some(settings_file.content), + kind: Some(proto::update_user_settings::Kind::Settings.into()), }, )?; } diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 615ad52e2ef367..28591136344177 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -33,7 +33,7 @@ use project::{ }; use rand::prelude::*; use serde_json::json; -use settings::SettingsStore; +use settings::{LocalSettingsKind, SettingsStore}; use std::{ cell::{Cell, RefCell}, env, future, mem, @@ -3327,8 +3327,16 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) .collect::>(), &[ - (Path::new("").into(), r#"{"tab_size":2}"#.to_string()), - (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), + ( + Path::new("").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":2}"#.to_string() + ), + ( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":8}"#.to_string() + ), ] ) }); @@ -3346,8 +3354,16 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) .collect::>(), &[ - (Path::new("").into(), r#"{}"#.to_string()), - (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), + ( + Path::new("").into(), + LocalSettingsKind::Settings, + r#"{}"#.to_string() + ), + ( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":8}"#.to_string() + ), ] ) }); @@ -3375,8 +3391,16 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) .collect::>(), &[ - (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()), - (Path::new("b").into(), 
r#"{"tab_size":4}"#.to_string()), + ( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":8}"#.to_string() + ), + ( + Path::new("b").into(), + LocalSettingsKind::Settings, + r#"{"tab_size":4}"#.to_string() + ), ] ) }); @@ -3406,7 +3430,11 @@ async fn test_local_settings( store .local_settings(worktree_b.read(cx).id()) .collect::>(), - &[(Path::new("a").into(), r#"{"hard_tabs":true}"#.to_string()),] + &[( + Path::new("a").into(), + LocalSettingsKind::Settings, + r#"{"hard_tabs":true}"#.to_string() + ),] ) }); } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index d794563672ed06..87150587b3607e 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1,3 +1,4 @@ +use anyhow::Context; use collections::HashMap; use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, EventEmitter, Model, ModelContext}; @@ -6,7 +7,7 @@ use paths::local_settings_file_relative_path; use rpc::{proto, AnyProtoClient, TypedEnvelope}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{InvalidSettingsError, Settings, SettingsSources, SettingsStore}; +use settings::{InvalidSettingsError, LocalSettingsKind, Settings, SettingsSources, SettingsStore}; use std::{ path::{Path, PathBuf}, sync::Arc, @@ -266,13 +267,14 @@ impl SettingsObserver { let store = cx.global::(); for worktree in self.worktree_store.read(cx).worktrees() { let worktree_id = worktree.read(cx).id().to_proto(); - for (path, content) in store.local_settings(worktree.read(cx).id()) { + for (path, kind, content) in store.local_settings(worktree.read(cx).id()) { downstream_client .send(proto::UpdateWorktreeSettings { project_id, worktree_id, path: path.to_string_lossy().into(), content: Some(content), + kind: Some(local_settings_kind_to_proto(kind).into()), }) .log_err(); } @@ -288,6 +290,11 @@ impl SettingsObserver { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> 
anyhow::Result<()> { + let kind = match envelope.payload.kind { + Some(kind) => proto::LocalSettingsKind::from_i32(kind) + .with_context(|| format!("unknown kind {kind}"))?, + None => proto::LocalSettingsKind::Settings, + }; this.update(&mut cx, |this, cx| { let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); let Some(worktree) = this @@ -297,10 +304,12 @@ impl SettingsObserver { else { return; }; + this.update_settings( worktree, [( PathBuf::from(&envelope.payload.path).into(), + local_settings_kind_from_proto(kind), envelope.payload.content, )], cx, @@ -327,6 +336,7 @@ impl SettingsObserver { ssh.send(proto::UpdateUserSettings { project_id: 0, content, + kind: Some(proto::LocalSettingsKind::Settings.into()), }) .log_err(); } @@ -342,6 +352,7 @@ impl SettingsObserver { ssh.send(proto::UpdateUserSettings { project_id: 0, content, + kind: Some(proto::LocalSettingsKind::Settings.into()), }) .log_err(); } @@ -397,6 +408,7 @@ impl SettingsObserver { settings_contents.push(async move { ( settings_dir, + LocalSettingsKind::Settings, if removed { None } else { @@ -413,15 +425,15 @@ impl SettingsObserver { let worktree = worktree.clone(); cx.spawn(move |this, cx| async move { - let settings_contents: Vec<(Arc, _)> = + let settings_contents: Vec<(Arc, _, _)> = futures::future::join_all(settings_contents).await; cx.update(|cx| { this.update(cx, |this, cx| { this.update_settings( worktree, - settings_contents - .into_iter() - .map(|(path, content)| (path, content.and_then(|c| c.log_err()))), + settings_contents.into_iter().map(|(path, kind, content)| { + (path, kind, content.and_then(|c| c.log_err())) + }), cx, ) }) @@ -433,17 +445,18 @@ impl SettingsObserver { fn update_settings( &mut self, worktree: Model, - settings_contents: impl IntoIterator, Option)>, + settings_contents: impl IntoIterator, LocalSettingsKind, Option)>, cx: &mut ModelContext, ) { let worktree_id = worktree.read(cx).id(); let remote_worktree_id = worktree.read(cx).id(); let result = 
cx.update_global::>(|store, cx| { - for (directory, file_content) in settings_contents { + for (directory, kind, file_content) in settings_contents { store.set_local_settings( worktree_id, directory.clone(), + kind, file_content.as_deref(), cx, )?; @@ -455,6 +468,7 @@ impl SettingsObserver { worktree_id: remote_worktree_id.to_proto(), path: directory.to_string_lossy().into_owned(), content: file_content, + kind: Some(local_settings_kind_to_proto(kind).into()), }) .log_err(); } @@ -481,3 +495,19 @@ impl SettingsObserver { } } } + +pub fn local_settings_kind_from_proto(kind: proto::LocalSettingsKind) -> LocalSettingsKind { + match kind { + proto::LocalSettingsKind::Settings => LocalSettingsKind::Settings, + proto::LocalSettingsKind::Tasks => LocalSettingsKind::Tasks, + proto::LocalSettingsKind::Editorconfig => LocalSettingsKind::Editorconfig, + } +} + +pub fn local_settings_kind_to_proto(kind: LocalSettingsKind) -> proto::LocalSettingsKind { + match kind { + LocalSettingsKind::Settings => proto::LocalSettingsKind::Settings, + LocalSettingsKind::Tasks => proto::LocalSettingsKind::Tasks, + LocalSettingsKind::Editorconfig => proto::LocalSettingsKind::Editorconfig, + } +} diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 07f64557f47e1d..f6e9645e9c11ac 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -642,6 +642,13 @@ message UpdateWorktreeSettings { uint64 worktree_id = 2; string path = 3; optional string content = 4; + optional LocalSettingsKind kind = 5; +} + +enum LocalSettingsKind { + Settings = 0; + Tasks = 1; + Editorconfig = 2; } message CreateProjectEntry { @@ -2487,6 +2494,12 @@ message AddWorktreeResponse { message UpdateUserSettings { uint64 project_id = 1; string content = 2; + optional Kind kind = 3; + + enum Kind { + Settings = 0; + Tasks = 1; + } } message CheckFileExists { diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index f1f8591bba4525..2ed01dc7c722fd 100644 
--- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -14,7 +14,8 @@ pub use json_schema::*; pub use keymap_file::KeymapFile; pub use settings_file::*; pub use settings_store::{ - InvalidSettingsError, Settings, SettingsLocation, SettingsSources, SettingsStore, + InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources, + SettingsStore, }; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 20bf52f2c57ef0..445420c1db8a7d 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -157,13 +157,14 @@ pub struct SettingsLocation<'a> { pub path: &'a Path, } -/// A set of strongly-typed setting values defined via multiple JSON files. +/// A set of strongly-typed setting values defined via multiple config files. pub struct SettingsStore { setting_values: HashMap>, raw_default_settings: serde_json::Value, raw_user_settings: serde_json::Value, raw_extension_settings: serde_json::Value, - raw_local_settings: BTreeMap<(WorktreeId, Arc), serde_json::Value>, + raw_local_settings: + BTreeMap<(WorktreeId, Arc), HashMap>, tab_size_callback: Option<( TypeId, Box Option + Send + Sync + 'static>, @@ -174,6 +175,13 @@ pub struct SettingsStore { >, } +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum LocalSettingsKind { + Settings, + Tasks, + Editorconfig, +} + impl Global for SettingsStore {} #[derive(Debug)] @@ -520,19 +528,21 @@ impl SettingsStore { pub fn set_local_settings( &mut self, root_id: WorktreeId, - path: Arc, + directory_path: Arc, + kind: LocalSettingsKind, settings_content: Option<&str>, cx: &mut AppContext, ) -> Result<()> { + let raw_local_settings = self + .raw_local_settings + .entry((root_id, directory_path.clone())) + .or_default(); if settings_content.is_some_and(|content| !content.is_empty()) { - self.raw_local_settings.insert( - (root_id, 
path.clone()), - parse_json_with_comments(settings_content.unwrap())?, - ); + raw_local_settings.insert(kind, parse_json_with_comments(settings_content.unwrap())?); } else { - self.raw_local_settings.remove(&(root_id, path.clone())); + raw_local_settings.remove(&kind); } - self.recompute_values(Some((root_id, &path)), cx)?; + self.recompute_values(Some((root_id, &directory_path)), cx)?; Ok(()) } @@ -553,7 +563,8 @@ impl SettingsStore { /// Add or remove a set of local settings via a JSON string. pub fn clear_local_settings(&mut self, root_id: WorktreeId, cx: &mut AppContext) -> Result<()> { - self.raw_local_settings.retain(|k, _| k.0 != root_id); + self.raw_local_settings + .retain(|(worktree_id, _), _| worktree_id != &root_id); self.recompute_values(Some((root_id, "".as_ref())), cx)?; Ok(()) } @@ -561,7 +572,7 @@ impl SettingsStore { pub fn local_settings( &self, root_id: WorktreeId, - ) -> impl '_ + Iterator, String)> { + ) -> impl '_ + Iterator, LocalSettingsKind, String)> { self.raw_local_settings .range( (root_id, Path::new("").into()) @@ -570,7 +581,12 @@ impl SettingsStore { Path::new("").into(), ), ) - .map(|((_, path), content)| (path.clone(), serde_json::to_string(content).unwrap())) + .flat_map(|((_, path), content)| { + content.iter().filter_map(|(&kind, raw_content)| { + let parsed_content = serde_json::to_string(raw_content).log_err()?; + Some((path.clone(), kind, parsed_content)) + }) + }) } pub fn json_schema( @@ -739,56 +755,63 @@ impl SettingsStore { // Reload the local values for the setting. paths_stack.clear(); project_settings_stack.clear(); - for ((root_id, path), local_settings) in &self.raw_local_settings { - // Build a stack of all of the local values for that setting. 
- while let Some(prev_entry) = paths_stack.last() { - if let Some((prev_root_id, prev_path)) = prev_entry { - if root_id != prev_root_id || !path.starts_with(prev_path) { - paths_stack.pop(); - project_settings_stack.pop(); - continue; + for ((root_id, directory_path), local_settings) in &self.raw_local_settings { + if let Some(local_settings) = local_settings.get(&LocalSettingsKind::Settings) { + // Build a stack of all of the local values for that setting. + while let Some(prev_entry) = paths_stack.last() { + if let Some((prev_root_id, prev_path)) = prev_entry { + if root_id != prev_root_id || !directory_path.starts_with(prev_path) { + paths_stack.pop(); + project_settings_stack.pop(); + continue; + } } + break; } - break; - } - match setting_value.deserialize_setting(local_settings) { - Ok(local_settings) => { - paths_stack.push(Some((*root_id, path.as_ref()))); - project_settings_stack.push(local_settings); - - // If a local settings file changed, then avoid recomputing local - // settings for any path outside of that directory. - if changed_local_path.map_or( - false, - |(changed_root_id, changed_local_path)| { - *root_id != changed_root_id || !path.starts_with(changed_local_path) - }, - ) { - continue; - } - - if let Some(value) = setting_value - .load_setting( - SettingsSources { - default: &default_settings, - extensions: extension_settings.as_ref(), - user: user_settings.as_ref(), - release_channel: release_channel_settings.as_ref(), - project: &project_settings_stack.iter().collect::>(), + match setting_value.deserialize_setting(local_settings) { + Ok(local_settings) => { + paths_stack.push(Some((*root_id, directory_path.as_ref()))); + project_settings_stack.push(local_settings); + + // If a local settings file changed, then avoid recomputing local + // settings for any path outside of that directory. 
+ if changed_local_path.map_or( + false, + |(changed_root_id, changed_local_path)| { + *root_id != changed_root_id + || !directory_path.starts_with(changed_local_path) }, - cx, - ) - .log_err() - { - setting_value.set_local_value(*root_id, path.clone(), value); + ) { + continue; + } + + if let Some(value) = setting_value + .load_setting( + SettingsSources { + default: &default_settings, + extensions: extension_settings.as_ref(), + user: user_settings.as_ref(), + release_channel: release_channel_settings.as_ref(), + project: &project_settings_stack.iter().collect::>(), + }, + cx, + ) + .log_err() + { + setting_value.set_local_value( + *root_id, + directory_path.clone(), + value, + ); + } + } + Err(error) => { + return Err(anyhow!(InvalidSettingsError::LocalSettings { + path: directory_path.join(local_settings_file_relative_path()), + message: error.to_string() + })); } - } - Err(error) => { - return Err(anyhow!(InvalidSettingsError::LocalSettings { - path: path.join(local_settings_file_relative_path()), - message: error.to_string() - })); } } } @@ -1201,6 +1224,7 @@ mod tests { .set_local_settings( WorktreeId::from_usize(1), Path::new("/root1").into(), + LocalSettingsKind::Settings, Some(r#"{ "user": { "staff": true } }"#), cx, ) @@ -1209,6 +1233,7 @@ mod tests { .set_local_settings( WorktreeId::from_usize(1), Path::new("/root1/subdir").into(), + LocalSettingsKind::Settings, Some(r#"{ "user": { "name": "Jane Doe" } }"#), cx, ) @@ -1218,6 +1243,7 @@ mod tests { .set_local_settings( WorktreeId::from_usize(1), Path::new("/root2").into(), + LocalSettingsKind::Settings, Some(r#"{ "user": { "age": 42 }, "key2": "b" }"#), cx, ) From f809787275850a70a45a30bf7b72ae5c9dd547b2 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 15:23:22 -0400 Subject: [PATCH 450/762] Update cloudflare/wrangler-action digest to 168bc28 (#18651) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 
8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [cloudflare/wrangler-action](https://redirect.github.com/cloudflare/wrangler-action) | action | digest | `f84a562` -> `168bc28` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/deploy_cloudflare.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index 5cf4d6fd13140e..5c09c29b0f44f2 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -36,28 +36,28 @@ jobs: mdbook build ./docs --dest-dir=../target/deploy/docs/ - name: Deploy Docs - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@168bc28b7078db16f6f1ecc26477fc2248592143 # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: pages deploy target/deploy --project-name=docs - name: Deploy Install - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@168bc28b7078db16f6f1ecc26477fc2248592143 # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh - name: Deploy Docs Workers - uses: 
cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@168bc28b7078db16f6f1ecc26477fc2248592143 # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} command: deploy .cloudflare/docs-proxy/src/worker.js - name: Deploy Install Workers - uses: cloudflare/wrangler-action@f84a562284fc78278ff9052435d9526f9c718361 # v3 + uses: cloudflare/wrangler-action@168bc28b7078db16f6f1ecc26477fc2248592143 # v3 with: apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} From 3a5deb5c6fc7ae1354cc5c59773055e85519a3cc Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 3 Oct 2024 01:00:48 +0530 Subject: [PATCH 451/762] Replace isahc with async ureq (#18414) REplace isahc with ureq everywhere gpui is used. This should allow us to make http requests without libssl; and avoid a long-tail of panics caused by ishac. Release Notes: - (potentially breaking change) updated our http client --------- Co-authored-by: Mikayla --- Cargo.lock | 602 ++++++++++++------ Cargo.toml | 17 +- crates/client/Cargo.toml | 3 +- crates/client/src/client.rs | 27 +- crates/collab/Cargo.toml | 2 +- crates/collab/src/api/events.rs | 40 +- crates/collab/src/llm.rs | 12 +- crates/collab/src/rpc.rs | 6 +- crates/evals/Cargo.toml | 2 +- crates/evals/src/eval.rs | 7 +- crates/extension/Cargo.toml | 4 +- crates/extension/src/extension_builder.rs | 2 +- crates/extension/src/extension_store_test.rs | 52 +- crates/extension_cli/Cargo.toml | 2 +- crates/extension_cli/src/main.rs | 9 +- crates/http_client/Cargo.toml | 4 +- crates/http_client/src/http_client.rs | 29 +- crates/isahc_http_client/LICENSE-APACHE | 1 - .../src/isahc_http_client.rs | 105 --- crates/live_kit_server/Cargo.toml | 2 +- crates/reqwest_client/Cargo.toml | 31 + crates/reqwest_client/LICENSE-GPL | 1 + crates/reqwest_client/examples/client.rs | 16 + crates/reqwest_client/src/reqwest_client.rs | 232 +++++++ 
crates/semantic_index/Cargo.toml | 2 +- crates/semantic_index/examples/index.rs | 7 +- .../Cargo.toml | 16 +- crates/ureq_client/LICENSE-GPL | 1 + crates/ureq_client/examples/client.rs | 24 + crates/ureq_client/src/ureq_client.rs | 187 ++++++ crates/vim/Cargo.toml | 2 +- crates/zed/Cargo.toml | 2 +- crates/zed/src/main.rs | 10 +- 33 files changed, 1063 insertions(+), 396 deletions(-) delete mode 120000 crates/isahc_http_client/LICENSE-APACHE delete mode 100644 crates/isahc_http_client/src/isahc_http_client.rs create mode 100644 crates/reqwest_client/Cargo.toml create mode 120000 crates/reqwest_client/LICENSE-GPL create mode 100644 crates/reqwest_client/examples/client.rs create mode 100644 crates/reqwest_client/src/reqwest_client.rs rename crates/{isahc_http_client => ureq_client}/Cargo.toml (52%) create mode 120000 crates/ureq_client/LICENSE-GPL create mode 100644 crates/ureq_client/examples/client.rs create mode 100644 crates/ureq_client/src/ureq_client.rs diff --git a/Cargo.lock b/Cargo.lock index b69e4541cc9e20..a96e59df34c1db 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -846,8 +846,8 @@ dependencies = [ "chrono", "futures-util", "http-types", - "hyper", - "hyper-rustls", + "hyper 0.14.30", + "hyper-rustls 0.24.2", "serde", "serde_json", "serde_path_to_error", @@ -880,15 +880,14 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-tls" -version = "0.12.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfeefd0ca297cbbb3bd34fd6b228401c2a5177038257afd751bc29f0a2da4795" +checksum = "b2ae3c9eba89d472a0e4fe1dea433df78fbbe63d2b764addaf2ba3a6bde89a5e" dependencies = [ "futures-core", "futures-io", - "rustls 0.20.9", + "rustls 0.21.12", "rustls-pemfile 1.0.4", - "webpki", "webpki-roots 0.22.6", ] @@ -905,9 +904,9 @@ dependencies = [ [[package]] name = "async-tungstenite" -version = "0.23.0" +version = "0.28.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc" +checksum = "90e661b6cb0a6eb34d02c520b052daa3aa9ac0cc02495c9d066bbce13ead132b" dependencies = [ "async-std", "async-tls", @@ -915,7 +914,7 @@ dependencies = [ "futures-util", "log", "pin-project-lite", - "tungstenite 0.20.1", + "tungstenite 0.24.0", ] [[package]] @@ -1064,7 +1063,7 @@ dependencies = [ "fastrand 2.1.1", "hex", "http 0.2.12", - "ring 0.17.8", + "ring", "time", "tokio", "tracing", @@ -1233,7 +1232,7 @@ dependencies = [ "once_cell", "p256", "percent-encoding", - "ring 0.17.8", + "ring", "sha2", "subtle", "time", @@ -1336,13 +1335,13 @@ dependencies = [ "aws-smithy-types", "bytes 1.7.1", "fastrand 2.1.1", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "http-body 1.0.1", "httparse", - "hyper", - "hyper-rustls", + "hyper 0.14.30", + "hyper-rustls 0.24.2", "once_cell", "pin-project-lite", "pin-utils", @@ -1432,7 +1431,7 @@ dependencies = [ "headers", "http 0.2.12", "http-body 0.4.6", - "hyper", + "hyper 0.14.30", "itoa", "matchit", "memchr", @@ -1445,7 +1444,7 @@ dependencies = [ "serde_path_to_error", "serde_urlencoded", "sha1", - "sync_wrapper", + "sync_wrapper 0.1.2", "tokio", "tokio-tungstenite 0.20.1", "tower", @@ -1584,7 +1583,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn 2.0.76", ] @@ -1604,7 +1603,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn 2.0.76", ] @@ -2100,12 +2099,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" -[[package]] -name = "castaway" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" - [[package]] name = "cbc" version = "0.1.2" @@ -2363,8 
+2356,8 @@ dependencies = [ "clickhouse-derive", "clickhouse-rs-cityhash-sys", "futures 0.3.30", - "hyper", - "hyper-tls", + "hyper 0.14.30", + "hyper-tls 0.5.0", "lz4", "sealed", "serde", @@ -2402,6 +2395,7 @@ dependencies = [ "anyhow", "async-native-tls", "async-recursion 0.3.2", + "async-tls", "async-tungstenite", "chrono", "clock", @@ -2419,8 +2413,6 @@ dependencies = [ "rand 0.8.5", "release_channel", "rpc", - "rustls 0.20.9", - "rustls-native-certs 0.8.0", "schemars", "serde", "serde_json", @@ -2567,9 +2559,8 @@ dependencies = [ "headless", "hex", "http_client", - "hyper", + "hyper 0.14.30", "indoc", - "isahc_http_client", "jsonwebtoken", "language", "language_model", @@ -2593,7 +2584,8 @@ dependencies = [ "release_channel", "remote", "remote_server", - "reqwest", + "reqwest 0.11.27", + "reqwest_client", "rpc", "rustc-demangle", "scrypt", @@ -2677,7 +2669,7 @@ dependencies = [ name = "collections" version = "0.1.0" dependencies = [ - "rustc-hash", + "rustc-hash 1.1.0", ] [[package]] @@ -2995,7 +2987,7 @@ dependencies = [ "log", "rangemap", "rayon", - "rustc-hash", + "rustc-hash 1.1.0", "rustybuzz", "self_cell", "swash", @@ -3085,7 +3077,7 @@ dependencies = [ "hashbrown 0.14.5", "log", "regalloc2", - "rustc-hash", + "rustc-hash 1.1.0", "smallvec", "target-lexicon", ] @@ -3341,36 +3333,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "curl" -version = "0.4.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" -dependencies = [ - "curl-sys", - "libc", - "openssl-probe", - "openssl-sys", - "schannel", - "socket2 0.5.7", - "windows-sys 0.52.0", -] - -[[package]] -name = "curl-sys" -version = "0.4.74+curl-8.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8af10b986114528fcdc4b63b6f5f021b7057618411046a4de2ba0f0149a097bf" -dependencies = [ - "cc", - "libc", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", - 
"windows-sys 0.52.0", -] - [[package]] name = "cursor-icon" version = "1.1.0" @@ -4032,7 +3994,6 @@ dependencies = [ "git", "gpui", "http_client", - "isahc_http_client", "language", "languages", "node_runtime", @@ -4043,6 +4004,7 @@ dependencies = [ "serde_json", "settings", "smol", + "ureq_client", ] [[package]] @@ -4127,7 +4089,6 @@ dependencies = [ "gpui", "http_client", "indexed_docs", - "isahc_http_client", "language", "log", "lsp", @@ -4136,6 +4097,7 @@ dependencies = [ "paths", "project", "release_channel", + "reqwest_client", "schemars", "semantic_version", "serde", @@ -4145,8 +4107,10 @@ dependencies = [ "snippet_provider", "task", "theme", + "tokio", "toml 0.8.19", "ui", + "ureq_client", "url", "util", "wasm-encoder 0.215.0", @@ -4166,9 +4130,9 @@ dependencies = [ "env_logger", "extension", "fs", - "isahc_http_client", "language", "log", + "reqwest_client", "rpc", "serde", "serde_json", @@ -4415,7 +4379,7 @@ dependencies = [ "futures-core", "futures-sink", "nanorand", - "spin 0.9.8", + "spin", ] [[package]] @@ -5181,6 +5145,25 @@ dependencies = [ "tracing", ] +[[package]] +name = "h2" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +dependencies = [ + "atomic-waker", + "bytes 1.7.1", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", + "indexmap 2.4.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "2.4.1" @@ -5561,8 +5544,10 @@ dependencies = [ "anyhow", "derive_more", "futures 0.3.30", - "http 0.2.12", + "http 1.1.0", "log", + "rustls 0.21.12", + "rustls-native-certs 0.8.0", "serde", "serde_json", "smol", @@ -5603,7 +5588,7 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "httparse", @@ -5617,6 +5602,26 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes 1.7.1", + "futures-channel", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + [[package]] name = "hyper-rustls" version = "0.24.2" @@ -5625,12 +5630,29 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper", + "hyper 0.14.30", "log", "rustls 0.21.12", "rustls-native-certs 0.6.3", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.4.1", + "hyper-util", + "rustls 0.23.13", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.0", + "tower-service", ] [[package]] @@ -5640,12 +5662,47 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.7.1", - "hyper", + "hyper 0.14.30", "native-tls", "tokio", "tokio-native-tls", ] +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes 1.7.1", + "http-body-util", + "hyper 1.4.1", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +dependencies = [ + "bytes 1.7.1", + "futures-channel", + "futures-util", + "http 
1.1.0", + "http-body 1.0.1", + "hyper 1.4.1", + "pin-project-lite", + "socket2 0.5.7", + "tokio", + "tower-service", + "tracing", +] + [[package]] name = "iana-time-zone" version = "0.1.60" @@ -6013,44 +6070,6 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" -[[package]] -name = "isahc" -version = "1.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" -dependencies = [ - "async-channel 1.9.0", - "castaway", - "crossbeam-utils", - "curl", - "curl-sys", - "encoding_rs", - "event-listener 2.5.3", - "futures-lite 1.13.0", - "http 0.2.12", - "log", - "mime", - "once_cell", - "polling 2.8.0", - "slab", - "sluice", - "tracing", - "tracing-futures", - "url", - "waker-fn", -] - -[[package]] -name = "isahc_http_client" -version = "0.1.0" -dependencies = [ - "anyhow", - "futures 0.3.30", - "http_client", - "isahc", - "util", -] - [[package]] name = "itertools" version = "0.10.5" @@ -6155,7 +6174,7 @@ dependencies = [ "base64 0.21.7", "js-sys", "pem", - "ring 0.17.8", + "ring", "serde", "serde_json", "simple_asn1", @@ -6406,7 +6425,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin 0.9.8", + "spin", ] [[package]] @@ -6601,7 +6620,7 @@ dependencies = [ "prost", "prost-build", "prost-types", - "reqwest", + "reqwest 0.12.8", "serde", ] @@ -7085,7 +7104,7 @@ dependencies = [ "hexf-parse", "indexmap 2.4.0", "log", - "rustc-hash", + "rustc-hash 1.1.0", "spirv", "termcolor", "thiserror", @@ -8742,6 +8761,54 @@ dependencies = [ "zed_actions", ] +[[package]] +name = "quinn" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c7c5fdde3cdae7203427dc4f0a68fe0ed09833edc525a03456b153b79828684" 
+dependencies = [ + "bytes 1.7.1", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.0.0", + "rustls 0.23.13", + "socket2 0.5.7", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" +dependencies = [ + "bytes 1.7.1", + "rand 0.8.5", + "ring", + "rustc-hash 2.0.0", + "rustls 0.23.13", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" +dependencies = [ + "libc", + "once_cell", + "socket2 0.5.7", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "quote" version = "1.0.37" @@ -9019,7 +9086,7 @@ checksum = "ad156d539c879b7a24a363a2016d77961786e71f48f2e2fc8302a92abd2429a6" dependencies = [ "hashbrown 0.13.2", "log", - "rustc-hash", + "rustc-hash 1.1.0", "slice-group-by", "smallvec", ] @@ -9196,11 +9263,11 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper", - "hyper-tls", + "hyper 0.14.30", + "hyper-tls 0.5.0", "ipnet", "js-sys", "log", @@ -9213,8 +9280,8 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", - "system-configuration", + "sync_wrapper 0.1.2", + "system-configuration 0.5.1", "tokio", "tokio-native-tls", "tower-service", @@ -9225,6 +9292,68 @@ dependencies = [ "winreg 0.50.0", ] +[[package]] +name = "reqwest" +version = "0.12.8" +source = "git+https://github.com/zed-industries/reqwest.git?rev=fd110f6998da16bbca97b6dddda9be7827c50e29#fd110f6998da16bbca97b6dddda9be7827c50e29" +dependencies = [ + "base64 0.22.1", + "bytes 1.7.1", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + 
"http-body-util", + "hyper 1.4.1", + "hyper-rustls 0.27.3", + "hyper-tls 0.6.0", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls 0.23.13", + "rustls-pemfile 2.1.3", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.1", + "system-configuration 0.6.1", + "tokio", + "tokio-native-tls", + "tokio-rustls 0.26.0", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "windows-registry", +] + +[[package]] +name = "reqwest_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "bytes 1.7.1", + "futures 0.3.30", + "http_client", + "reqwest 0.12.8", + "serde", + "smol", + "tokio", +] + [[package]] name = "resvg" version = "0.41.0" @@ -9273,21 +9402,6 @@ dependencies = [ "util", ] -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", -] - [[package]] name = "ring" version = "0.17.8" @@ -9298,8 +9412,8 @@ dependencies = [ "cfg-if", "getrandom 0.2.15", "libc", - "spin 0.9.8", - "untrusted 0.9.0", + "spin", + "untrusted", "windows-sys 0.52.0", ] @@ -9455,7 +9569,7 @@ dependencies = [ "futures 0.3.30", "glob", "rand 0.8.5", - "ring 0.17.8", + "ring", "serde", "serde_json", "shellexpand 3.1.0", @@ -9527,6 +9641,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hash" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" + [[package]] name = "rustc_version" version = 
"0.4.1" @@ -9578,26 +9698,28 @@ dependencies = [ [[package]] name = "rustls" -version = "0.20.9" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring 0.16.20", + "ring", + "rustls-webpki 0.101.7", "sct", - "webpki", ] [[package]] name = "rustls" -version = "0.21.12" +version = "0.23.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" dependencies = [ - "log", - "ring 0.17.8", - "rustls-webpki", - "sct", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", ] [[package]] @@ -9656,8 +9778,19 @@ version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", + "ring", + "untrusted", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", ] [[package]] @@ -9771,8 +9904,8 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -9968,7 +10101,6 @@ dependencies = [ "gpui", "heed", "http_client", - "isahc_http_client", "language", "language_model", "languages", @@ -9986,6 +10118,7 @@ dependencies = [ "tree-sitter", "ui", "unindent", + "ureq_client", "util", "workspace", 
"worktree", @@ -10418,17 +10551,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "sluice" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" -dependencies = [ - "async-channel 1.9.0", - "futures-core", - "futures-io", -] - [[package]] name = "smallvec" version = "1.13.2" @@ -10543,12 +10665,6 @@ dependencies = [ "smallvec", ] -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - [[package]] name = "spin" version = "0.9.8" @@ -11178,6 +11294,15 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] + [[package]] name = "synchronoise" version = "1.0.1" @@ -11218,7 +11343,18 @@ checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", "core-foundation 0.9.4", - "system-configuration-sys", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.6.0", + "core-foundation 0.9.4", + "system-configuration-sys 0.6.0", ] [[package]] @@ -11231,6 +11367,16 @@ dependencies = [ "libc", ] +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" 
+dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "system-deps" version = "6.2.2" @@ -11607,7 +11753,7 @@ dependencies = [ "fancy-regex", "lazy_static", "parking_lot", - "rustc-hash", + "rustc-hash 1.1.0", ] [[package]] @@ -11822,6 +11968,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls 0.23.13", + "rustls-pki-types", + "tokio", +] + [[package]] name = "tokio-socks" version = "0.5.2" @@ -11871,9 +12028,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes 1.7.1", "futures-core", @@ -12055,16 +12212,6 @@ dependencies = [ "valuable", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -12371,6 +12518,24 @@ dependencies = [ "utf-8", ] +[[package]] +name = "tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a" +dependencies = [ + "byteorder", + "bytes 1.7.1", + "data-encoding", + "http 1.1.0", + "httparse", + "log", + "rand 0.8.5", + "sha1", + "thiserror", + "utf-8", +] + [[package]] name = "typeid" version = "1.0.2" @@ -12531,15 +12696,40 @@ checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" [[package]] name = "untrusted" -version = "0.7.1" 
+version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] -name = "untrusted" -version = "0.9.0" +name = "ureq" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97" +dependencies = [ + "base64 0.21.7", + "flate2", + "log", + "once_cell", + "rustls 0.21.12", + "rustls-webpki 0.101.7", + "url", + "webpki-roots 0.25.4", +] + +[[package]] +name = "ureq_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.30", + "gpui", + "http_client", + "parking_lot", + "serde", + "smol", + "ureq", + "util", +] [[package]] name = "url" @@ -12844,7 +13034,7 @@ dependencies = [ "futures-util", "headers", "http 0.2.12", - "hyper", + "hyper 0.14.30", "log", "mime", "mime_guess", @@ -12980,6 +13170,19 @@ dependencies = [ "wasmparser 0.201.0", ] +[[package]] +name = "wasm-streams" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wasmparser" version = "0.201.0" @@ -13395,8 +13598,8 @@ version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -13653,6 +13856,17 @@ dependencies = [ "syn 2.0.76", ] +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result 0.2.0", + "windows-strings", + "windows-targets 0.52.6", +] + [[package]] name = "windows-result" version = "0.1.2" @@ -14443,7 +14657,6 @@ dependencies = [ "image_viewer", "inline_completion_button", "install_cli", - "isahc_http_client", "journal", "language", "language_model", @@ -14496,6 +14709,7 @@ dependencies = [ "tree-sitter-md", "tree-sitter-rust", "ui", + "ureq_client", "url", "urlencoding", "util", diff --git a/Cargo.toml b/Cargo.toml index 1ef14dae70c202..fea528db5b89ee 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,6 +7,7 @@ members = [ "crates/assistant", "crates/assistant_slash_command", "crates/assistant_tool", + "crates/ureq_client", "crates/audio", "crates/auto_update", "crates/breadcrumbs", @@ -52,7 +53,6 @@ members = [ "crates/indexed_docs", "crates/inline_completion_button", "crates/install_cli", - "crates/isahc_http_client", "crates/journal", "crates/language", "crates/language_model", @@ -87,6 +87,7 @@ members = [ "crates/release_channel", "crates/remote", "crates/remote_server", + "crates/reqwest_client", "crates/repl", "crates/rich_text", "crates/rope", @@ -186,6 +187,8 @@ assets = { path = "crates/assets" } assistant = { path = "crates/assistant" } assistant_slash_command = { path = "crates/assistant_slash_command" } assistant_tool = { path = "crates/assistant_tool" } +ureq_client = { path = "crates/ureq_client" } +async-compat = { version = "0.2.1" } audio = { path = "crates/audio" } auto_update = { path = "crates/auto_update" } breadcrumbs = { path = "crates/breadcrumbs" } @@ -228,7 +231,6 @@ image_viewer = { path = "crates/image_viewer" } indexed_docs = { path = "crates/indexed_docs" } inline_completion_button = { path = "crates/inline_completion_button" } install_cli = { path = "crates/install_cli" } -isahc_http_client = { path = "crates/isahc_http_client" } journal = { path = "crates/journal" } language = { path = "crates/language" } 
language_model = { path = "crates/language_model" } @@ -265,6 +267,7 @@ release_channel = { path = "crates/release_channel" } remote = { path = "crates/remote" } remote_server = { path = "crates/remote_server" } repl = { path = "crates/repl" } +reqwest_client = { path = "crates/reqwest_client" } rich_text = { path = "crates/rich_text" } rope = { path = "crates/rope" } rpc = { path = "crates/rpc" } @@ -325,7 +328,7 @@ async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "8 async-recursion = "1.0.0" async-tar = "0.5.0" async-trait = "0.1" -async-tungstenite = "0.23" +async-tungstenite = "0.28" async-watch = "0.3.1" async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } base64 = "0.22" @@ -364,10 +367,7 @@ ignore = "0.4.22" image = "0.25.1" indexmap = { version = "1.6.2", features = ["serde"] } indoc = "2" -# We explicitly disable http2 support in isahc. -isahc = { version = "1.7.2", default-features = false, features = [ - "text-decoding", -] } + itertools = "0.13.0" jsonwebtoken = "9.3" libc = "0.2" @@ -392,13 +392,14 @@ pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" regex = "1.5" repair_json = "0.1.0" +reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29" } rsa = "0.9.6" runtimelib = { version = "0.15", default-features = false, features = [ "async-dispatcher-runtime", ] } rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } -rustls = "0.20.3" +rustls = "0.21.12" rustls-native-certs = "0.8.0" schemars = { version = "0.8", features = ["impl_json_schema"] } semver = "1.0" diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index dd420bbbe63184..c3fbea1f98a882 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -18,6 +18,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup [dependencies] anyhow.workspace = true async-recursion = 
"0.3" +async-tls = "0.13" async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] } chrono = { workspace = true, features = ["serde"] } clock.workspace = true @@ -34,8 +35,6 @@ postage.workspace = true rand.workspace = true release_channel.workspace = true rpc = { workspace = true, features = ["gpui"] } -rustls.workspace = true -rustls-native-certs.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index d565d620c3c206..819bd7551f5965 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1023,7 +1023,7 @@ impl Client { &self, http: Arc, release_channel: Option, - ) -> impl Future> { + ) -> impl Future> { #[cfg(any(test, feature = "test-support"))] let url_override = self.rpc_url.read().clone(); @@ -1117,7 +1117,7 @@ impl Client { // for us from the RPC URL. // // Among other things, it will generate and set a `Sec-WebSocket-Key` header for us. - let mut request = rpc_url.into_client_request()?; + let mut request = IntoClientRequest::into_client_request(rpc_url.as_str())?; // We then modify the request to add our desired headers. 
let request_headers = request.headers_mut(); @@ -1137,30 +1137,13 @@ impl Client { match url_scheme { Https => { - let client_config = { - let mut root_store = rustls::RootCertStore::empty(); - - let root_certs = rustls_native_certs::load_native_certs(); - for error in root_certs.errors { - log::warn!("error loading native certs: {:?}", error); - } - root_store.add_parsable_certificates( - &root_certs - .certs - .into_iter() - .map(|cert| cert.as_ref().to_owned()) - .collect::>(), - ); - rustls::ClientConfig::builder() - .with_safe_defaults() - .with_root_certificates(root_store) - .with_no_client_auth() - }; let (stream, _) = async_tungstenite::async_tls::client_async_tls_with_connector( request, stream, - Some(client_config.into()), + Some(async_tls::TlsConnector::from( + http_client::TLS_CONFIG.clone(), + )), ) .await?; Ok(Connection::new( diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index ad43d2d1f0cf50..7d4c5d0c706b7e 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -37,7 +37,7 @@ futures.workspace = true google_ai.workspace = true hex.workspace = true http_client.workspace = true -isahc_http_client.workspace = true +reqwest_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true log.workspace = true diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index bbfa69c0b8f70b..dd1370e8866442 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -674,7 +674,7 @@ pub struct EditorEventRow { copilot_enabled_for_language: bool, historical_event: bool, architecture: String, - is_staff: Option, + is_staff: bool, major: Option, minor: Option, patch: Option, @@ -708,7 +708,7 @@ impl EditorEventRow { installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), metrics_id: body.metrics_id.clone().unwrap_or_default(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: 
time.timestamp_millis(), operation: event.operation, file_extension: event.file_extension.unwrap_or_default(), @@ -741,7 +741,7 @@ pub struct InlineCompletionEventRow { region_code: String, city: String, time: i64, - is_staff: Option, + is_staff: bool, major: Option, minor: Option, patch: Option, @@ -772,7 +772,7 @@ impl InlineCompletionEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), file_extension: event.file_extension.unwrap_or_default(), signed_in: wrapper.signed_in, @@ -800,7 +800,7 @@ pub struct CallEventRow { // ClientEventBase installation_id: String, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // CallEventRow @@ -832,7 +832,7 @@ impl CallEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), operation: event.operation, room_id: event.room_id, @@ -856,7 +856,7 @@ pub struct AssistantEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // AssistantEventRow @@ -891,7 +891,7 @@ impl AssistantEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), conversation_id: event.conversation_id.unwrap_or_default(), kind: event.kind.to_string(), @@ -909,7 +909,7 @@ impl AssistantEventRow { pub struct CpuEventRow { installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, usage_as_percentage: f32, core_count: u32, 
app_version: String, @@ -947,7 +947,7 @@ impl CpuEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), usage_as_percentage: event.usage_as_percentage, core_count: event.core_count, @@ -970,7 +970,7 @@ pub struct MemoryEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // MemoryEventRow @@ -1001,7 +1001,7 @@ impl MemoryEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), memory_in_bytes: event.memory_in_bytes, virtual_memory_in_bytes: event.virtual_memory_in_bytes, @@ -1024,7 +1024,7 @@ pub struct AppEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // AppEventRow @@ -1054,7 +1054,7 @@ impl AppEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), operation: event.operation, } @@ -1076,7 +1076,7 @@ pub struct SettingEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // SettingEventRow setting: String, @@ -1106,7 +1106,7 @@ impl SettingEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), setting: event.setting, value: event.value, @@ -1129,7 +1129,7 @@ 
pub struct ExtensionEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // ExtensionEventRow @@ -1164,7 +1164,7 @@ impl ExtensionEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), extension_id: event.extension_id, extension_version: event.version, @@ -1198,7 +1198,7 @@ pub struct ReplEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: Option, + is_staff: bool, time: i64, // ReplEventRow @@ -1230,7 +1230,7 @@ impl ReplEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff, + is_staff: body.is_staff.unwrap_or_default(), time: time.timestamp_millis(), kernel_language: event.kernel_language, kernel_status: event.kernel_status, diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index 14f10342a78dd7..2d040cfa28e1a9 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -22,7 +22,8 @@ use chrono::{DateTime, Duration, Utc}; use collections::HashMap; use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase}; use futures::{Stream, StreamExt as _}; -use isahc_http_client::IsahcHttpClient; + +use reqwest_client::ReqwestClient; use rpc::ListModelsResponse; use rpc::{ proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME, @@ -43,7 +44,7 @@ pub struct LlmState { pub config: Config, pub executor: Executor, pub db: Arc, - pub http_client: IsahcHttpClient, + pub http_client: ReqwestClient, pub clickhouse_client: Option, active_user_count_by_model: RwLock, ActiveUserCount)>>, @@ -69,11 +70,8 @@ impl LlmState { let db = Arc::new(db); let user_agent = format!("Zed Server/{}", 
env!("CARGO_PKG_VERSION")); - let http_client = IsahcHttpClient::builder() - .default_header("User-Agent", user_agent) - .build() - .map(IsahcHttpClient::from) - .context("failed to construct http client")?; + let http_client = + ReqwestClient::user_agent(&user_agent).context("failed to construct http client")?; let this = Self { executor, diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 5f21df4ab9dbce..27c95a5b44e1a8 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -36,8 +36,8 @@ use collections::{HashMap, HashSet}; pub use connection_pool::{ConnectionPool, ZedVersion}; use core::fmt::{self, Debug, Formatter}; use http_client::HttpClient; -use isahc_http_client::IsahcHttpClient; use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL}; +use reqwest_client::ReqwestClient; use sha2::Digest; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; @@ -954,8 +954,8 @@ impl Server { tracing::info!("connection opened"); let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); - let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { - Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)), + let http_client = match ReqwestClient::user_agent(&user_agent) { + Ok(http_client) => Arc::new(http_client), Err(error) => { tracing::error!(?error, "failed to create HTTP client"); return; diff --git a/crates/evals/Cargo.toml b/crates/evals/Cargo.toml index 400ab139aa2e40..52af0ce446f918 100644 --- a/crates/evals/Cargo.toml +++ b/crates/evals/Cargo.toml @@ -16,6 +16,7 @@ path = "src/eval.rs" [dependencies] clap.workspace = true anyhow.workspace = true +ureq_client.workspace = true client.workspace = true clock.workspace = true collections.workspace = true @@ -24,7 +25,6 @@ feature_flags.workspace = true fs.workspace = true git.workspace = true gpui.workspace = true -isahc_http_client.workspace = true language.workspace = true languages.workspace = true 
http_client.workspace = true diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 899d8210537111..e2dc5c8e03a2fa 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -32,6 +32,7 @@ use std::{ Arc, }, }; +use ureq_client::UreqClient; const CODESEARCH_NET_DIR: &'static str = "target/datasets/code-search-net"; const EVAL_REPOS_DIR: &'static str = "target/datasets/eval-repos"; @@ -100,7 +101,11 @@ fn main() -> Result<()> { gpui::App::headless().run(move |cx| { let executor = cx.background_executor().clone(); - let client = isahc_http_client::IsahcHttpClient::new(None, None); + let client = Arc::new(UreqClient::new( + None, + "Zed LLM evals".to_string(), + executor.clone(), + )); cx.set_http_client(client.clone()); match cli.command { Commands::Fetch {} => { diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index 6ce1bd6862a1dd..9fea3a768a0c02 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -56,10 +56,12 @@ task.workspace = true serde_json_lenient.workspace = true [dev-dependencies] -isahc_http_client.workspace = true +ureq_client.workspace = true ctor.workspace = true env_logger.workspace = true parking_lot.workspace = true +reqwest_client.workspace = true +tokio.workspace = true fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index 7380e699f9e715..876d0336dc7077 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -25,7 +25,7 @@ use wit_component::ComponentEncoder; /// Once Rust 1.78 is released, there will be a `wasm32-wasip2` target available, so we will /// not need the adapter anymore. 
const RUST_TARGET: &str = "wasm32-wasip1"; -const WASI_ADAPTER_URL: &str = +pub const WASI_ADAPTER_URL: &str = "https://github.com/bytecodealliance/wasmtime/releases/download/v18.0.2/wasi_snapshot_preview1.reactor.wasm"; /// Compiling Tree-sitter parsers from C to WASM requires Clang 17, and a WASM build of libc diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 126e6b2cfbdad0..7a3c645e041a14 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -1,3 +1,4 @@ +use crate::extension_builder::WASI_ADAPTER_URL; use crate::extension_manifest::SchemaVersion; use crate::extension_settings::ExtensionSettings; use crate::{ @@ -11,14 +12,14 @@ use collections::BTreeMap; use fs::{FakeFs, Fs, RealFs}; use futures::{io::BufReader, AsyncReadExt, StreamExt}; use gpui::{Context, SemanticVersion, TestAppContext}; -use http_client::{FakeHttpClient, Response}; +use http_client::{AsyncBody, FakeHttpClient, HttpClient, Response}; use indexed_docs::IndexedDocsRegistry; -use isahc_http_client::IsahcHttpClient; use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; use node_runtime::NodeRuntime; use parking_lot::Mutex; use project::{Project, DEFAULT_COMPLETION_CONTEXT}; use release_channel::AppVersion; +use reqwest_client::ReqwestClient; use serde_json::json; use settings::{Settings as _, SettingsStore}; use snippet_provider::SnippetRegistry; @@ -28,6 +29,7 @@ use std::{ sync::Arc, }; use theme::ThemeRegistry; +use ureq_client::UreqClient; use util::test::temp_tree; #[cfg(test)] @@ -576,7 +578,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { std::env::consts::ARCH ) }); - let builder_client = IsahcHttpClient::new(None, Some(user_agent)); + let builder_client = Arc::new(UreqClient::new(None, user_agent, cx.executor().clone())); let extension_store = cx.new_model(|cx| { ExtensionStore::new( @@ -769,6 
+771,50 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { assert!(fs.metadata(&expected_server_path).await.unwrap().is_none()); } +#[gpui::test] +async fn test_wasi_adapter_download(cx: &mut TestAppContext) { + let client = Arc::new(UreqClient::new( + None, + "zed-test-wasi-adapter-download".to_string(), + cx.executor().clone(), + )); + + let mut response = client + .get(WASI_ADAPTER_URL, AsyncBody::default(), true) + .await + .unwrap(); + + let mut content = Vec::new(); + let mut body = BufReader::new(response.body_mut()); + body.read_to_end(&mut content).await.unwrap(); + + assert!(wasmparser::Parser::is_core_wasm(&content)); + assert_eq!(content.len(), 96801); // Determined by downloading this to my computer + wit_component::ComponentEncoder::default() + .adapter("wasi_snapshot_preview1", &content) + .unwrap(); +} + +#[tokio::test] +async fn test_wasi_adapter_download_tokio() { + let client = Arc::new(ReqwestClient::new()); + + let mut response = client + .get(WASI_ADAPTER_URL, AsyncBody::default(), true) + .await + .unwrap(); + + let mut content = Vec::new(); + let mut body = BufReader::new(response.body_mut()); + body.read_to_end(&mut content).await.unwrap(); + + assert!(wasmparser::Parser::is_core_wasm(&content)); + assert_eq!(content.len(), 96801); // Determined by downloading this to my computer + wit_component::ComponentEncoder::default() + .adapter("wasi_snapshot_preview1", &content) + .unwrap(); +} + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let store = SettingsStore::test(cx); diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index bc649d8e04989f..3e109a0036b2fc 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -18,7 +18,7 @@ clap = { workspace = true, features = ["derive"] } env_logger.workspace = true extension = { workspace = true, features = ["no-webrtc"] } fs.workspace = true -isahc_http_client.workspace = true +reqwest_client.workspace = 
true language.workspace = true log.workspace = true rpc.workspace = true diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index 6eaebca2f0e9bf..dd6f2213781197 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -13,8 +13,8 @@ use extension::{ extension_builder::{CompileExtensionOptions, ExtensionBuilder}, ExtensionManifest, }; -use isahc_http_client::IsahcHttpClient; use language::LanguageConfig; +use reqwest_client::ReqwestClient; use theme::ThemeRegistry; use tree_sitter::{Language, Query, WasmStore}; @@ -66,12 +66,7 @@ async fn main() -> Result<()> { std::env::consts::OS, std::env::consts::ARCH ); - let http_client = Arc::new( - IsahcHttpClient::builder() - .default_header("User-Agent", user_agent) - .build() - .map(IsahcHttpClient::from)?, - ); + let http_client = Arc::new(ReqwestClient::user_agent(&user_agent)?); let builder = ExtensionBuilder::new(http_client, scratch_dir); builder diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index 0244ac41042b6f..52c2947b8a7e4f 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -16,7 +16,9 @@ path = "src/http_client.rs" doctest = true [dependencies] -http = "0.2" +http = "1.1" +rustls.workspace = true +rustls-native-certs.workspace = true anyhow.workspace = true derive_more.workspace = true futures.workspace = true diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 2f029a1d236bba..015c73a448c5b8 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -11,13 +11,21 @@ use http::request::Builder; #[cfg(feature = "test-support")] use std::fmt; use std::{ - sync::{Arc, Mutex}, + sync::{Arc, LazyLock, Mutex}, time::Duration, }; pub use url::Url; +#[derive(Clone)] pub struct ReadTimeout(pub Duration); -#[derive(Default, Debug, Clone)] +impl Default for ReadTimeout { + fn default() -> Self { + 
Self(Duration::from_secs(5)) + } +} + +#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] + pub enum RedirectPolicy { #[default] NoFollow, @@ -26,6 +34,23 @@ pub enum RedirectPolicy { } pub struct FollowRedirects(pub bool); +pub static TLS_CONFIG: LazyLock> = LazyLock::new(|| { + let mut root_store = rustls::RootCertStore::empty(); + + let root_certs = rustls_native_certs::load_native_certs(); + for error in root_certs.errors { + log::warn!("error loading native certs: {:?}", error); + } + root_store.add_parsable_certificates(&root_certs.certs); + + Arc::new( + rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(root_store) + .with_no_client_auth(), + ) +}); + pub trait HttpRequestExt { /// Set a read timeout on the request. /// For isahc, this is the low_speed_timeout. diff --git a/crates/isahc_http_client/LICENSE-APACHE b/crates/isahc_http_client/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3affa..00000000000000 --- a/crates/isahc_http_client/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/isahc_http_client/src/isahc_http_client.rs b/crates/isahc_http_client/src/isahc_http_client.rs deleted file mode 100644 index 778f6a04598909..00000000000000 --- a/crates/isahc_http_client/src/isahc_http_client.rs +++ /dev/null @@ -1,105 +0,0 @@ -use std::{mem, sync::Arc, time::Duration}; - -use futures::future::BoxFuture; -use util::maybe; - -pub use isahc::config::Configurable; -pub struct IsahcHttpClient(isahc::HttpClient); - -pub use http_client::*; - -impl IsahcHttpClient { - pub fn new(proxy: Option, user_agent: Option) -> Arc { - let mut builder = isahc::HttpClient::builder() - .connect_timeout(Duration::from_secs(5)) - .low_speed_timeout(100, Duration::from_secs(5)) - .proxy(proxy.clone()); - if let Some(agent) = user_agent { - builder = builder.default_header("User-Agent", agent); - } - Arc::new(IsahcHttpClient(builder.build().unwrap())) - } - pub fn builder() -> 
isahc::HttpClientBuilder { - isahc::HttpClientBuilder::new() - } -} - -impl From for IsahcHttpClient { - fn from(client: isahc::HttpClient) -> Self { - Self(client) - } -} - -impl HttpClient for IsahcHttpClient { - fn proxy(&self) -> Option<&Uri> { - None - } - - fn send( - &self, - req: http_client::http::Request, - ) -> BoxFuture<'static, Result, anyhow::Error>> - { - let redirect_policy = req - .extensions() - .get::() - .cloned() - .unwrap_or_default(); - let read_timeout = req - .extensions() - .get::() - .map(|t| t.0); - let req = maybe!({ - let (mut parts, body) = req.into_parts(); - let mut builder = isahc::Request::builder() - .method(parts.method) - .uri(parts.uri) - .version(parts.version); - if let Some(read_timeout) = read_timeout { - builder = builder.low_speed_timeout(100, read_timeout); - } - - let headers = builder.headers_mut()?; - mem::swap(headers, &mut parts.headers); - - let extensions = builder.extensions_mut()?; - mem::swap(extensions, &mut parts.extensions); - - let isahc_body = match body.0 { - http_client::Inner::Empty => isahc::AsyncBody::empty(), - http_client::Inner::AsyncReader(reader) => isahc::AsyncBody::from_reader(reader), - http_client::Inner::SyncReader(reader) => { - isahc::AsyncBody::from_bytes_static(reader.into_inner()) - } - }; - - builder - .redirect_policy(match redirect_policy { - http_client::RedirectPolicy::FollowAll => isahc::config::RedirectPolicy::Follow, - http_client::RedirectPolicy::FollowLimit(limit) => { - isahc::config::RedirectPolicy::Limit(limit) - } - http_client::RedirectPolicy::NoFollow => isahc::config::RedirectPolicy::None, - }) - .body(isahc_body) - .ok() - }); - - let client = self.0.clone(); - - Box::pin(async move { - match req { - Some(req) => client - .send_async(req) - .await - .map_err(Into::into) - .map(|response| { - let (parts, body) = response.into_parts(); - let body = http_client::AsyncBody::from_reader(body); - http_client::Response::from_parts(parts, body) - }), - None => 
Err(anyhow::anyhow!("Request was malformed")), - } - }) - } -} diff --git a/crates/live_kit_server/Cargo.toml b/crates/live_kit_server/Cargo.toml index bad4c5a05f4754..4b4b5e13dad43b 100644 --- a/crates/live_kit_server/Cargo.toml +++ b/crates/live_kit_server/Cargo.toml @@ -20,7 +20,7 @@ jsonwebtoken.workspace = true log.workspace = true prost.workspace = true prost-types.workspace = true -reqwest = "0.11" +reqwest.workspace = true serde.workspace = true [build-dependencies] diff --git a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml new file mode 100644 index 00000000000000..d39319125299f1 --- /dev/null +++ b/crates/reqwest_client/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "reqwest_client" +version = "0.1.0" +edition = "2021" +publish = false +license = "Apache-2.0" + +[lints] +workspace = true + +[features] +test-support = [] + +[lib] +path = "src/reqwest_client.rs" +doctest = true + +[[example]] +name = "client" +path = "examples/client.rs" + +[dependencies] +anyhow.workspace = true +futures.workspace = true +serde.workspace = true +smol.workspace = true +http_client.workspace = true +tokio.workspace = true +bytes = "1.0" + +reqwest = { workspace = true, features = ["rustls-tls-manual-roots", "stream"] } diff --git a/crates/reqwest_client/LICENSE-GPL b/crates/reqwest_client/LICENSE-GPL new file mode 120000 index 00000000000000..89e542f750cd38 --- /dev/null +++ b/crates/reqwest_client/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/reqwest_client/examples/client.rs b/crates/reqwest_client/examples/client.rs new file mode 100644 index 00000000000000..1f50d21e4edc2b --- /dev/null +++ b/crates/reqwest_client/examples/client.rs @@ -0,0 +1,16 @@ +use futures::AsyncReadExt as _; +use http_client::AsyncBody; +use http_client::HttpClient; +use reqwest_client::ReqwestClient; + +#[tokio::main] +async fn main() { + let resp = ReqwestClient::new() + .get("http://zed.dev", AsyncBody::empty(), true) + .await 
+ .unwrap(); + + let mut body = String::new(); + resp.into_body().read_to_string(&mut body).await.unwrap(); + println!("{}", &body); +} diff --git a/crates/reqwest_client/src/reqwest_client.rs b/crates/reqwest_client/src/reqwest_client.rs new file mode 100644 index 00000000000000..6e84c58954ab12 --- /dev/null +++ b/crates/reqwest_client/src/reqwest_client.rs @@ -0,0 +1,232 @@ +use std::{borrow::Cow, io::Read, pin::Pin, task::Poll}; + +use anyhow::anyhow; +use bytes::{BufMut, Bytes, BytesMut}; +use futures::{AsyncRead, TryStreamExt}; +use http_client::{http, AsyncBody, ReadTimeout}; +use reqwest::header::{HeaderMap, HeaderValue}; +use smol::future::FutureExt; + +const DEFAULT_CAPACITY: usize = 4096; + +pub struct ReqwestClient { + client: reqwest::Client, +} + +impl ReqwestClient { + pub fn new() -> Self { + Self { + client: reqwest::Client::new(), + } + } + + pub fn user_agent(agent: &str) -> anyhow::Result { + let mut map = HeaderMap::new(); + map.insert(http::header::USER_AGENT, HeaderValue::from_str(agent)?); + Ok(Self { + client: reqwest::Client::builder().default_headers(map).build()?, + }) + } +} + +impl From for ReqwestClient { + fn from(client: reqwest::Client) -> Self { + Self { client } + } +} + +// This struct is essentially a re-implementation of +// https://docs.rs/tokio-util/0.7.12/tokio_util/io/struct.ReaderStream.html +// except outside of Tokio's aegis +struct ReaderStream { + reader: Option>>, + buf: BytesMut, + capacity: usize, +} + +impl ReaderStream { + fn new(reader: Pin>) -> Self { + Self { + reader: Some(reader), + buf: BytesMut::new(), + capacity: DEFAULT_CAPACITY, + } + } +} + +impl futures::Stream for ReaderStream { + type Item = std::io::Result; + + fn poll_next( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> Poll> { + let mut this = self.as_mut(); + + let mut reader = match this.reader.take() { + Some(r) => r, + None => return Poll::Ready(None), + }; + + if this.buf.capacity() == 0 { + let capacity = 
this.capacity; + this.buf.reserve(capacity); + } + + match poll_read_buf(&mut reader, cx, &mut this.buf) { + Poll::Pending => Poll::Pending, + Poll::Ready(Err(err)) => { + self.reader = None; + + Poll::Ready(Some(Err(err))) + } + Poll::Ready(Ok(0)) => { + self.reader = None; + Poll::Ready(None) + } + Poll::Ready(Ok(_)) => { + let chunk = this.buf.split(); + self.reader = Some(reader); + Poll::Ready(Some(Ok(chunk.freeze()))) + } + } + } +} + +/// Implementation from https://docs.rs/tokio-util/0.7.12/src/tokio_util/util/poll_buf.rs.html +/// Specialized for this use case +pub fn poll_read_buf( + io: &mut Pin>, + cx: &mut std::task::Context<'_>, + buf: &mut BytesMut, +) -> Poll> { + if !buf.has_remaining_mut() { + return Poll::Ready(Ok(0)); + } + + let n = { + let dst = buf.chunk_mut(); + + // Safety: `chunk_mut()` returns a `&mut UninitSlice`, and `UninitSlice` is a + // transparent wrapper around `[MaybeUninit]`. + let dst = unsafe { &mut *(dst as *mut _ as *mut [std::mem::MaybeUninit]) }; + let mut buf = tokio::io::ReadBuf::uninit(dst); + let ptr = buf.filled().as_ptr(); + let unfilled_portion = buf.initialize_unfilled(); + // SAFETY: Pin projection + let io_pin = unsafe { Pin::new_unchecked(io) }; + std::task::ready!(io_pin.poll_read(cx, unfilled_portion)?); + + // Ensure the pointer does not change from under us + assert_eq!(ptr, buf.filled().as_ptr()); + buf.filled().len() + }; + + // Safety: This is guaranteed to be the number of initialized (and read) + // bytes due to the invariants provided by `ReadBuf::filled`. 
+ unsafe { + buf.advance_mut(n); + } + + Poll::Ready(Ok(n)) +} + +enum WrappedBodyInner { + None, + SyncReader(std::io::Cursor>), + Stream(ReaderStream), +} + +struct WrappedBody(WrappedBodyInner); + +impl WrappedBody { + fn new(body: AsyncBody) -> Self { + match body.0 { + http_client::Inner::Empty => Self(WrappedBodyInner::None), + http_client::Inner::SyncReader(cursor) => Self(WrappedBodyInner::SyncReader(cursor)), + http_client::Inner::AsyncReader(pin) => { + Self(WrappedBodyInner::Stream(ReaderStream::new(pin))) + } + } + } +} + +impl futures::stream::Stream for WrappedBody { + type Item = Result; + + fn poll_next( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + match &mut self.0 { + WrappedBodyInner::None => Poll::Ready(None), + WrappedBodyInner::SyncReader(cursor) => { + let mut buf = Vec::new(); + match cursor.read_to_end(&mut buf) { + Ok(_) => { + return Poll::Ready(Some(Ok(Bytes::from(buf)))); + } + Err(e) => return Poll::Ready(Some(Err(e))), + } + } + WrappedBodyInner::Stream(stream) => { + // SAFETY: Pin projection + let stream = unsafe { Pin::new_unchecked(stream) }; + futures::Stream::poll_next(stream, cx) + } + } + } +} + +impl http_client::HttpClient for ReqwestClient { + fn proxy(&self) -> Option<&http::Uri> { + None + } + + fn send( + &self, + req: http::Request, + ) -> futures::future::BoxFuture< + 'static, + Result, anyhow::Error>, + > { + let (parts, body) = req.into_parts(); + + let mut request = self.client.request(parts.method, parts.uri.to_string()); + + request = request.headers(parts.headers); + + if let Some(redirect_policy) = parts.extensions.get::() { + request = request.redirect_policy(match redirect_policy { + http_client::RedirectPolicy::NoFollow => reqwest::redirect::Policy::none(), + http_client::RedirectPolicy::FollowLimit(limit) => { + reqwest::redirect::Policy::limited(*limit as usize) + } + http_client::RedirectPolicy::FollowAll => reqwest::redirect::Policy::limited(100), + 
}); + } + + if let Some(ReadTimeout(timeout)) = parts.extensions.get::() { + request = request.timeout(*timeout); + } + + let body = WrappedBody::new(body); + let request = request.body(reqwest::Body::wrap_stream(body)); + + async move { + let response = request.send().await.map_err(|e| anyhow!(e))?; + let status = response.status(); + let mut builder = http::Response::builder().status(status.as_u16()); + for (name, value) in response.headers() { + builder = builder.header(name, value); + } + let bytes = response.bytes_stream(); + let bytes = bytes + .map_err(|e| futures::io::Error::new(futures::io::ErrorKind::Other, e)) + .into_async_read(); + let body = http_client::AsyncBody::from_reader(bytes); + builder.body(body).map_err(|e| anyhow!(e)) + } + .boxed() + } +} diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index 691d6e57f6d448..8842093f7857ff 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -51,7 +51,7 @@ workspace.workspace = true worktree.workspace = true [dev-dependencies] -isahc_http_client.workspace = true +ureq_client.workspace = true env_logger.workspace = true client = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index c5c2c633a10603..1ebed4c17f3bda 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -2,7 +2,6 @@ use client::Client; use futures::channel::oneshot; use gpui::App; use http_client::HttpClientWithUrl; -use isahc_http_client::IsahcHttpClient; use language::language_settings::AllLanguageSettings; use project::Project; use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb}; @@ -29,7 +28,11 @@ fn main() { let clock = Arc::new(FakeSystemClock::default()); let http = Arc::new(HttpClientWithUrl::new( - IsahcHttpClient::new(None, None), + 
Arc::new(ureq_client::UreqClient::new( + None, + "Zed semantic index example".to_string(), + cx.background_executor().clone(), + )), "http://localhost:11434", None, )); diff --git a/crates/isahc_http_client/Cargo.toml b/crates/ureq_client/Cargo.toml similarity index 52% rename from crates/isahc_http_client/Cargo.toml rename to crates/ureq_client/Cargo.toml index 82f7621bf8cace..a14419a2261a05 100644 --- a/crates/isahc_http_client/Cargo.toml +++ b/crates/ureq_client/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "isahc_http_client" +name = "ureq_client" version = "0.1.0" edition = "2021" publish = false @@ -12,11 +12,21 @@ workspace = true test-support = [] [lib] -path = "src/isahc_http_client.rs" +path = "src/ureq_client.rs" +doctest = true + +[[example]] +name = "client" +path = "examples/client.rs" [dependencies] anyhow.workspace = true futures.workspace = true +serde.workspace = true +smol.workspace = true +gpui.workspace = true http_client.workspace = true -isahc.workspace = true util.workspace = true +parking_lot.workspace = true + +ureq = "=2.9.1" diff --git a/crates/ureq_client/LICENSE-GPL b/crates/ureq_client/LICENSE-GPL new file mode 120000 index 00000000000000..89e542f750cd38 --- /dev/null +++ b/crates/ureq_client/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/ureq_client/examples/client.rs b/crates/ureq_client/examples/client.rs new file mode 100644 index 00000000000000..c5caae40dac9ec --- /dev/null +++ b/crates/ureq_client/examples/client.rs @@ -0,0 +1,24 @@ +use futures::AsyncReadExt; +use http_client::{AsyncBody, HttpClient}; +use ureq_client::UreqClient; + +fn main() { + gpui::App::headless().run(|cx| { + println!("{:?}", std::thread::current().id()); + cx.spawn(|cx| async move { + let resp = UreqClient::new( + None, + "Conrad's bot".to_string(), + cx.background_executor().clone(), + ) + .get("http://zed.dev", AsyncBody::empty(), true) + .await + .unwrap(); + + let mut body = String::new(); + 
resp.into_body().read_to_string(&mut body).await.unwrap(); + println!("{}", body); + }) + .detach(); + }) +} diff --git a/crates/ureq_client/src/ureq_client.rs b/crates/ureq_client/src/ureq_client.rs new file mode 100644 index 00000000000000..8951e80ac2df5e --- /dev/null +++ b/crates/ureq_client/src/ureq_client.rs @@ -0,0 +1,187 @@ +use std::collections::HashMap; +use std::io::Read; +use std::sync::Arc; +use std::time::Duration; +use std::{pin::Pin, task::Poll}; + +use anyhow::Error; +use futures::channel::mpsc; +use futures::future::BoxFuture; +use futures::{AsyncRead, SinkExt, StreamExt}; +use http_client::{http, AsyncBody, HttpClient, RedirectPolicy, Uri}; +use smol::future::FutureExt; +use util::ResultExt; + +pub struct UreqClient { + // Note in ureq 2.x the options are stored on the Agent. + // In ureq 3.x we'll be able to set these on the request. + // In practice it's probably "fine" to have many clients, the number of distinct options + // is low; and most requests to the same connection will have the same options so the + // connection pool will work. + clients: Arc>>, + proxy_url: Option, + proxy: Option, + user_agent: String, + background_executor: gpui::BackgroundExecutor, +} + +impl UreqClient { + pub fn new( + proxy_url: Option, + user_agent: String, + background_executor: gpui::BackgroundExecutor, + ) -> Self { + Self { + clients: Arc::default(), + proxy_url: proxy_url.clone(), + proxy: proxy_url.and_then(|url| ureq::Proxy::new(url.to_string()).log_err()), + user_agent, + background_executor, + } + } + + fn agent_for(&self, redirect_policy: RedirectPolicy, timeout: Duration) -> ureq::Agent { + let mut clients = self.clients.lock(); + // in case our assumption of distinct options is wrong, we'll sporadically clean it out. 
+ if clients.len() > 50 { + clients.clear() + } + + clients + .entry((timeout, redirect_policy.clone())) + .or_insert_with(|| { + let mut builder = ureq::AgentBuilder::new() + .timeout_connect(Duration::from_secs(5)) + .timeout_read(timeout) + .timeout_write(timeout) + .user_agent(&self.user_agent) + .tls_config(http_client::TLS_CONFIG.clone()) + .redirects(match redirect_policy { + RedirectPolicy::NoFollow => 0, + RedirectPolicy::FollowLimit(limit) => limit, + RedirectPolicy::FollowAll => 100, + }); + if let Some(proxy) = &self.proxy { + builder = builder.proxy(proxy.clone()); + } + builder.build() + }) + .clone() + } +} +impl HttpClient for UreqClient { + fn proxy(&self) -> Option<&Uri> { + self.proxy_url.as_ref() + } + + fn send( + &self, + request: http::Request, + ) -> BoxFuture<'static, Result, Error>> { + let agent = self.agent_for( + request + .extensions() + .get::() + .cloned() + .unwrap_or_default(), + request + .extensions() + .get::() + .cloned() + .unwrap_or_default() + .0, + ); + let mut req = agent.request(&request.method().as_ref(), &request.uri().to_string()); + for (name, value) in request.headers().into_iter() { + req = req.set(name.as_str(), value.to_str().unwrap()); + } + let body = request.into_body(); + let executor = self.background_executor.clone(); + + self.background_executor + .spawn(async move { + let response = req.send(body)?; + + let mut builder = http::Response::builder() + .status(response.status()) + .version(http::Version::HTTP_11); + for name in response.headers_names() { + if let Some(value) = response.header(&name) { + builder = builder.header(name, value); + } + } + + let body = AsyncBody::from_reader(UreqResponseReader::new(executor, response)); + let http_response = builder.body(body)?; + + Ok(http_response) + }) + .boxed() + } +} + +struct UreqResponseReader { + receiver: mpsc::Receiver>>, + buffer: Vec, + idx: usize, + _task: gpui::Task<()>, +} + +impl UreqResponseReader { + fn new(background_executor: 
gpui::BackgroundExecutor, response: ureq::Response) -> Self { + let (mut sender, receiver) = mpsc::channel(1); + let mut reader = response.into_reader(); + let task = background_executor.spawn(async move { + let mut buffer = vec![0; 8192]; + loop { + let n = match reader.read(&mut buffer) { + Ok(0) => break, + Ok(n) => n, + Err(e) => { + let _ = sender.send(Err(e)).await; + break; + } + }; + let _ = sender.send(Ok(buffer[..n].to_vec())).await; + } + }); + + UreqResponseReader { + _task: task, + receiver, + buffer: Vec::new(), + idx: 0, + } + } +} + +impl AsyncRead for UreqResponseReader { + fn poll_read( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut [u8], + ) -> Poll> { + if self.buffer.is_empty() { + match self.receiver.poll_next_unpin(cx) { + Poll::Ready(Some(Ok(data))) => self.buffer = data, + Poll::Ready(Some(Err(e))) => { + return Poll::Ready(Err(e)); + } + Poll::Ready(None) => { + return Poll::Ready(Ok(0)); + } + Poll::Pending => { + return Poll::Pending; + } + } + } + let n = std::cmp::min(buf.len(), self.buffer.len() - self.idx); + buf[..n].copy_from_slice(&self.buffer[self.idx..self.idx + n]); + self.idx += n; + if self.idx == self.buffer.len() { + self.buffer.clear(); + self.idx = 0; + } + Poll::Ready(Ok(n)) + } +} diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index dcbf2e8b597a34..99394b7922c4fa 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -17,7 +17,7 @@ neovim = ["nvim-rs", "async-compat", "async-trait", "tokio"] [dependencies] anyhow.workspace = true -async-compat = { version = "0.2.1", "optional" = true } +async-compat = { workspace = true, "optional" = true } async-trait = { workspace = true, "optional" = true } collections.workspace = true command_palette.workspace = true diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index e22f75f5bb3e54..ac73bf15eea18d 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -57,7 +57,7 @@ http_client.workspace = true 
image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true -isahc_http_client.workspace = true +ureq_client.workspace = true journal.workspace = true language.workspace = true language_model.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 06f1d926aea37d..adb5feb9fe5bdc 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -24,9 +24,9 @@ use gpui::{ UpdateGlobal as _, VisualContext, }; use http_client::{read_proxy_from_env, Uri}; -use isahc_http_client::IsahcHttpClient; use language::LanguageRegistry; use log::LevelFilter; +use ureq_client::UreqClient; use assets::Assets; use node_runtime::{NodeBinaryOptions, NodeRuntime}; @@ -334,9 +334,7 @@ fn main() { log::info!("========== starting zed =========="); - let app = App::new() - .with_assets(Assets) - .with_http_client(IsahcHttpClient::new(None, None)); + let app = App::new().with_assets(Assets); let system_id = app.background_executor().block(system_id()).ok(); let installation_id = app.background_executor().block(installation_id()).ok(); @@ -470,8 +468,8 @@ fn main() { .ok() }) .or_else(read_proxy_from_env); - let http = IsahcHttpClient::new(proxy_url, Some(user_agent)); - cx.set_http_client(http); + let http = UreqClient::new(proxy_url, user_agent, cx.background_executor().clone()); + cx.set_http_client(Arc::new(http)); ::set_global(fs.clone(), cx); From 9565a90528056988a402280c9303f55843bc63fb Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 2 Oct 2024 16:10:25 -0400 Subject: [PATCH 452/762] collab: Revert changes to Clickhouse event rows (#18654) This PR reverts the changes to the Clickhouse event rows that were included in https://github.com/zed-industries/zed/pull/18414. The changes don't seem to be correct, as they make the row structs differ from the underlying table schema. 
Release Notes: - N/A --- crates/collab/src/api/events.rs | 40 ++++++++++++++++----------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index dd1370e8866442..bbfa69c0b8f70b 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -674,7 +674,7 @@ pub struct EditorEventRow { copilot_enabled_for_language: bool, historical_event: bool, architecture: String, - is_staff: bool, + is_staff: Option, major: Option, minor: Option, patch: Option, @@ -708,7 +708,7 @@ impl EditorEventRow { installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), metrics_id: body.metrics_id.clone().unwrap_or_default(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), operation: event.operation, file_extension: event.file_extension.unwrap_or_default(), @@ -741,7 +741,7 @@ pub struct InlineCompletionEventRow { region_code: String, city: String, time: i64, - is_staff: bool, + is_staff: Option, major: Option, minor: Option, patch: Option, @@ -772,7 +772,7 @@ impl InlineCompletionEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), file_extension: event.file_extension.unwrap_or_default(), signed_in: wrapper.signed_in, @@ -800,7 +800,7 @@ pub struct CallEventRow { // ClientEventBase installation_id: String, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // CallEventRow @@ -832,7 +832,7 @@ impl CallEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, 
time: time.timestamp_millis(), operation: event.operation, room_id: event.room_id, @@ -856,7 +856,7 @@ pub struct AssistantEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // AssistantEventRow @@ -891,7 +891,7 @@ impl AssistantEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), conversation_id: event.conversation_id.unwrap_or_default(), kind: event.kind.to_string(), @@ -909,7 +909,7 @@ impl AssistantEventRow { pub struct CpuEventRow { installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, usage_as_percentage: f32, core_count: u32, app_version: String, @@ -947,7 +947,7 @@ impl CpuEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), usage_as_percentage: event.usage_as_percentage, core_count: event.core_count, @@ -970,7 +970,7 @@ pub struct MemoryEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // MemoryEventRow @@ -1001,7 +1001,7 @@ impl MemoryEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), memory_in_bytes: event.memory_in_bytes, virtual_memory_in_bytes: event.virtual_memory_in_bytes, @@ -1024,7 +1024,7 @@ pub struct AppEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // AppEventRow @@ -1054,7 +1054,7 @@ impl 
AppEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), operation: event.operation, } @@ -1076,7 +1076,7 @@ pub struct SettingEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // SettingEventRow setting: String, @@ -1106,7 +1106,7 @@ impl SettingEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), setting: event.setting, value: event.value, @@ -1129,7 +1129,7 @@ pub struct ExtensionEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // ExtensionEventRow @@ -1164,7 +1164,7 @@ impl ExtensionEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), extension_id: event.extension_id, extension_version: event.version, @@ -1198,7 +1198,7 @@ pub struct ReplEventRow { // ClientEventBase installation_id: Option, session_id: Option, - is_staff: bool, + is_staff: Option, time: i64, // ReplEventRow @@ -1230,7 +1230,7 @@ impl ReplEventRow { os_version: body.os_version.clone().unwrap_or_default(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), - is_staff: body.is_staff.unwrap_or_default(), + is_staff: body.is_staff, time: time.timestamp_millis(), kernel_language: event.kernel_language, kernel_status: event.kernel_status, From 6f4385e73741b0cba6cb3028ecd8d4d76086ba4b Mon Sep 17 00:00:00 2001 From: Marshall 
Bowers Date: Wed, 2 Oct 2024 16:26:48 -0400 Subject: [PATCH 453/762] Sort dependencies in `Cargo.toml` files (#18657) This PR sorts the dependencies in various `Cargo.toml` files after #18414. Release Notes: - N/A --- Cargo.toml | 24 ++++++++++++------------ crates/collab/Cargo.toml | 12 ++++++------ crates/evals/Cargo.toml | 12 ++++++------ crates/extension/Cargo.toml | 17 ++++++++--------- crates/extension_cli/Cargo.toml | 2 +- crates/http_client/Cargo.toml | 6 +++--- crates/reqwest_client/Cargo.toml | 4 ++-- crates/semantic_index/Cargo.toml | 18 +++++++++--------- crates/ureq_client/Cargo.toml | 7 +++---- crates/vim/Cargo.toml | 10 ++++------ crates/zed/Cargo.toml | 2 +- 11 files changed, 55 insertions(+), 59 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index fea528db5b89ee..8feb93a57856a2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,7 +7,6 @@ members = [ "crates/assistant", "crates/assistant_slash_command", "crates/assistant_tool", - "crates/ureq_client", "crates/audio", "crates/auto_update", "crates/breadcrumbs", @@ -87,8 +86,8 @@ members = [ "crates/release_channel", "crates/remote", "crates/remote_server", - "crates/reqwest_client", "crates/repl", + "crates/reqwest_client", "crates/rich_text", "crates/rope", "crates/rpc", @@ -123,6 +122,7 @@ members = [ "crates/ui", "crates/ui_input", "crates/ui_macros", + "crates/ureq_client", "crates/util", "crates/vcs_menu", "crates/vim", @@ -176,6 +176,7 @@ members = [ default-members = ["crates/zed"] [workspace.dependencies] + # # Workspace member crates # @@ -187,8 +188,6 @@ assets = { path = "crates/assets" } assistant = { path = "crates/assistant" } assistant_slash_command = { path = "crates/assistant_slash_command" } assistant_tool = { path = "crates/assistant_tool" } -ureq_client = { path = "crates/ureq_client" } -async-compat = { version = "0.2.1" } audio = { path = "crates/audio" } auto_update = { path = "crates/auto_update" } breadcrumbs = { path = "crates/breadcrumbs" } @@ -223,7 +222,6 @@ go_to_line = { 
path = "crates/go_to_line" } google_ai = { path = "crates/google_ai" } gpui = { path = "crates/gpui" } gpui_macros = { path = "crates/gpui_macros" } -handlebars = "4.3" headless = { path = "crates/headless" } html_to_markdown = { path = "crates/html_to_markdown" } http_client = { path = "crates/http_client" } @@ -302,6 +300,7 @@ title_bar = { path = "crates/title_bar" } ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } ui_macros = { path = "crates/ui_macros" } +ureq_client = { path = "crates/ureq_client" } util = { path = "crates/util" } vcs_menu = { path = "crates/vcs_menu" } vim = { path = "crates/vim" } @@ -321,6 +320,7 @@ any_vec = "0.14" anyhow = "1.0.86" arrayvec = { version = "0.7.4", features = ["serde"] } ashpd = "0.9.1" +async-compat = "0.2.1" async-compression = { version = "0.4", features = ["gzip", "futures-io"] } async-dispatcher = "0.1" async-fs = "1.6" @@ -359,15 +359,15 @@ futures-batch = "0.6.1" futures-lite = "1.13" git2 = { version = "0.19", default-features = false } globset = "0.4" +handlebars = "4.3" heed = { version = "0.20.1", features = ["read-txn-no-tls"] } hex = "0.4.3" -hyper = "0.14" html5ever = "0.27.0" +hyper = "0.14" ignore = "0.4.22" image = "0.25.1" indexmap = { version = "1.6.2", features = ["serde"] } indoc = "2" - itertools = "0.13.0" jsonwebtoken = "9.3" libc = "0.2" @@ -382,17 +382,18 @@ ordered-float = "2.1.1" palette = { version = "0.7.5", default-features = false, features = ["std"] } parking_lot = "0.12.1" pathdiff = "0.2" -profiling = "1" postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = "1.3.0" +profiling = "1" prost = "0.9" prost-build = "0.9" prost-types = "0.9" +protols-tree-sitter-proto = { git = "https://github.com/zed-industries/tree-sitter-proto", rev = "0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" } pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" regex = "1.5" repair_json = "0.1.0" -reqwest = { git = 
"https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29" } +reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29" } rsa = "0.9.6" runtimelib = { version = "0.15", default-features = false, features = [ "async-dispatcher-runtime", @@ -453,15 +454,14 @@ tree-sitter-html = "0.20" tree-sitter-jsdoc = "0.23" tree-sitter-json = "0.23" tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "4cfa6aad6b75052a5077c80fd934757d9267d81b" } -protols-tree-sitter-proto = { git = "https://github.com/zed-industries/tree-sitter-proto", rev = "0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" } tree-sitter-python = "0.23" tree-sitter-regex = "0.23" tree-sitter-ruby = "0.23" tree-sitter-rust = "0.23" tree-sitter-typescript = "0.23" -tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } -unindent = "0.1.7" +tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } unicase = "2.6" +unindent = "0.1.7" unicode-segmentation = "1.10" url = "2.2" uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 7d4c5d0c706b7e..de7a3c621465e0 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -28,8 +28,8 @@ axum = { version = "0.6", features = ["json", "headers", "ws"] } axum-extra = { version = "0.4", features = ["erased-json"] } base64.workspace = true chrono.workspace = true -clock.workspace = true clickhouse.workspace = true +clock.workspace = true collections.workspace = true dashmap.workspace = true envy = "0.4.2" @@ -37,19 +37,19 @@ futures.workspace = true google_ai.workspace = true hex.workspace = true http_client.workspace = true -reqwest_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = 
true log.workspace = true nanoid.workspace = true open_ai.workspace = true -supermaven_api.workspace = true parking_lot.workspace = true prometheus = "0.13" prost.workspace = true rand.workspace = true reqwest = { version = "0.11", features = ["json"] } +reqwest_client.workspace = true rpc.workspace = true +rustc-demangle.workspace = true scrypt = "0.11" sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] } semantic_version.workspace = true @@ -61,7 +61,7 @@ sha2.workspace = true sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] } strum.workspace = true subtle.workspace = true -rustc-demangle.workspace = true +supermaven_api.workspace = true telemetry_events.workspace = true text.workspace = true thiserror.workspace = true @@ -85,6 +85,7 @@ client = { workspace = true, features = ["test-support"] } collab_ui = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } ctor.workspace = true +dev_server_projects.workspace = true editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true file_finder.workspace = true @@ -92,6 +93,7 @@ fs = { workspace = true, features = ["test-support"] } git = { workspace = true, features = ["test-support"] } git_hosting_providers.workspace = true gpui = { workspace = true, features = ["test-support"] } +headless.workspace = true hyper.workspace = true indoc.workspace = true language = { workspace = true, features = ["test-support"] } @@ -108,7 +110,6 @@ recent_projects = { workspace = true } release_channel.workspace = true remote = { workspace = true, features = ["test-support"] } remote_server.workspace = true -dev_server_projects.workspace = true rpc = { workspace = true, features = ["test-support"] } sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-sqlite"] } serde_json.workspace = true @@ -120,7 +121,6 @@ 
unindent.workspace = true util.workspace = true workspace = { workspace = true, features = ["test-support"] } worktree = { workspace = true, features = ["test-support"] } -headless.workspace = true [package.metadata.cargo-machete] ignored = ["async-stripe"] diff --git a/crates/evals/Cargo.toml b/crates/evals/Cargo.toml index 52af0ce446f918..2697b768453f16 100644 --- a/crates/evals/Cargo.toml +++ b/crates/evals/Cargo.toml @@ -14,9 +14,8 @@ name = "eval" path = "src/eval.rs" [dependencies] -clap.workspace = true anyhow.workspace = true -ureq_client.workspace = true +clap.workspace = true client.workspace = true clock.workspace = true collections.workspace = true @@ -25,14 +24,15 @@ feature_flags.workspace = true fs.workspace = true git.workspace = true gpui.workspace = true +http_client.workspace = true language.workspace = true languages.workspace = true -http_client.workspace = true +node_runtime.workspace = true open_ai.workspace = true project.workspace = true -settings.workspace = true +semantic_index.workspace = true serde.workspace = true serde_json.workspace = true +settings.workspace = true smol.workspace = true -semantic_index.workspace = true -node_runtime.workspace = true +ureq_client.workspace = true diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index 9fea3a768a0c02..2b1d6193f86699 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -39,32 +39,31 @@ schemars.workspace = true semantic_version.workspace = true serde.workspace = true serde_json.workspace = true +serde_json_lenient.workspace = true settings.workspace = true snippet_provider.workspace = true +task.workspace = true theme.workspace = true toml.workspace = true ui.workspace = true url.workspace = true util.workspace = true wasm-encoder.workspace = true -wasmtime.workspace = true -wasmtime-wasi.workspace = true wasmparser.workspace = true +wasmtime-wasi.workspace = true +wasmtime.workspace = true wit-component.workspace = true workspace.workspace 
= true -task.workspace = true -serde_json_lenient.workspace = true [dev-dependencies] -ureq_client.workspace = true ctor.workspace = true env_logger.workspace = true -parking_lot.workspace = true -reqwest_client.workspace = true -tokio.workspace = true - fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } +parking_lot.workspace = true project = { workspace = true, features = ["test-support"] } +reqwest_client.workspace = true +tokio.workspace = true +ureq_client.workspace = true workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index 3e109a0036b2fc..6de3e858d4fff5 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -18,9 +18,9 @@ clap = { workspace = true, features = ["derive"] } env_logger.workspace = true extension = { workspace = true, features = ["no-webrtc"] } fs.workspace = true -reqwest_client.workspace = true language.workspace = true log.workspace = true +reqwest_client.workspace = true rpc.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index 52c2947b8a7e4f..e8585cff9820fd 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -16,13 +16,13 @@ path = "src/http_client.rs" doctest = true [dependencies] -http = "1.1" -rustls.workspace = true -rustls-native-certs.workspace = true anyhow.workspace = true derive_more.workspace = true futures.workspace = true +http = "1.1" log.workspace = true +rustls-native-certs.workspace = true +rustls.workspace = true serde.workspace = true serde_json.workspace = true smol.workspace = true diff --git a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml index d39319125299f1..060a382d72ee43 100644 --- a/crates/reqwest_client/Cargo.toml +++ 
b/crates/reqwest_client/Cargo.toml @@ -21,11 +21,11 @@ path = "examples/client.rs" [dependencies] anyhow.workspace = true +bytes = "1.0" futures.workspace = true +http_client.workspace = true serde.workspace = true smol.workspace = true -http_client.workspace = true tokio.workspace = true -bytes = "1.0" reqwest = { workspace = true, features = ["rustls-tls-manual-roots", "stream"] } diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index 8842093f7857ff..508e64ffea3626 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -26,42 +26,42 @@ clock.workspace = true collections.workspace = true feature_flags.workspace = true fs.workspace = true -futures.workspace = true futures-batch.workspace = true +futures.workspace = true gpui.workspace = true +heed.workspace = true +http_client.workspace = true language.workspace = true language_model.workspace = true log.workspace = true -heed.workspace = true -http_client.workspace = true open_ai.workspace = true parking_lot.workspace = true project.workspace = true -settings.workspace = true serde.workspace = true serde_json.workspace = true +settings.workspace = true sha2.workspace = true smol.workspace = true theme.workspace = true tree-sitter.workspace = true ui. workspace = true -util. workspace = true unindent.workspace = true +util. 
workspace = true workspace.workspace = true worktree.workspace = true [dev-dependencies] -ureq_client.workspace = true -env_logger.workspace = true client = { workspace = true, features = ["test-support"] } +env_logger.workspace = true fs = { workspace = true, features = ["test-support"] } futures.workspace = true gpui = { workspace = true, features = ["test-support"] } +http_client = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] } languages.workspace = true project = { workspace = true, features = ["test-support"] } tempfile.workspace = true +ureq_client.workspace = true util = { workspace = true, features = ["test-support"] } -worktree = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } +worktree = { workspace = true, features = ["test-support"] } diff --git a/crates/ureq_client/Cargo.toml b/crates/ureq_client/Cargo.toml index a14419a2261a05..757ba010946c70 100644 --- a/crates/ureq_client/Cargo.toml +++ b/crates/ureq_client/Cargo.toml @@ -22,11 +22,10 @@ path = "examples/client.rs" [dependencies] anyhow.workspace = true futures.workspace = true -serde.workspace = true -smol.workspace = true gpui.workspace = true http_client.workspace = true -util.workspace = true parking_lot.workspace = true - +serde.workspace = true +smol.workspace = true ureq = "=2.9.1" +util.workspace = true diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index 99394b7922c4fa..bb347f49b79d84 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -28,10 +28,9 @@ itertools.workspace = true language.workspace = true log.workspace = true multi_buffer.workspace = true -nvim-rs = { git = "https://github.com/KillTheMule/nvim-rs", branch = "master", features = [ - "use_tokio", -], optional = true } +nvim-rs = { git = "https://github.com/KillTheMule/nvim-rs", branch = "master", features = 
["use_tokio"], optional = true } regex.workspace = true +schemars.workspace = true search.workspace = true serde.workspace = true serde_derive.workspace = true @@ -39,21 +38,20 @@ serde_json.workspace = true settings.workspace = true tokio = { version = "1.15", "optional" = true } ui.workspace = true +util.workspace = true workspace.workspace = true zed_actions.workspace = true -schemars.workspace = true -util.workspace = true [dev-dependencies] command_palette.workspace = true editor = { workspace = true, features = ["test-support"] } futures.workspace = true gpui = { workspace = true, features = ["test-support"] } -release_channel.workspace = true indoc.workspace = true language = { workspace = true, features = ["test-support"] } lsp = { workspace = true, features = ["test-support"] } parking_lot.workspace = true +release_channel.workspace = true settings.workspace = true util = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index ac73bf15eea18d..e340e176a74146 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -57,7 +57,6 @@ http_client.workspace = true image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true -ureq_client.workspace = true journal.workspace = true language.workspace = true language_model.workspace = true @@ -108,6 +107,7 @@ theme.workspace = true theme_selector.workspace = true time.workspace = true ui.workspace = true +ureq_client.workspace = true url.workspace = true urlencoding = "2.1.2" util.workspace = true From e2d613a803e0f04c6365d832e5300059ab493ae3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 17:39:32 -0400 Subject: [PATCH 454/762] Update Rust crate clap to v4.5.19 (#18660) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the 
following updates: | Package | Type | Update | Change | |---|---|---|---| | [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.18` -> `4.5.19` | --- ### Release Notes
clap-rs/clap (clap) ### [`v4.5.19`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4519---2024-10-01) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.18...v4.5.19) ##### Internal - Update dependencies
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a96e59df34c1db..187d0c92c806c2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2275,9 +2275,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.18" +version = "4.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3" +checksum = "7be5744db7978a28d9df86a214130d106a89ce49644cbc4e3f0c22c3fba30615" dependencies = [ "clap_builder", "clap_derive", @@ -2285,9 +2285,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.18" +version = "4.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b" +checksum = "a5fbc17d3ef8278f55b282b2a2e75ae6f6c7d4bb70ed3d0382375104bfafdb4b" dependencies = [ "anstream", "anstyle", @@ -11571,12 +11571,12 @@ dependencies = [ [[package]] name = "terminal_size" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" +checksum = "4f599bd7ca042cfdf8f4512b277c02ba102247820f9d9d4a9f521f496751a6ef" dependencies = [ "rustix 0.38.35", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] From 
19b186671b85cb587c187bd33b1ccead43abe49a Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 3 Oct 2024 00:35:56 +0200 Subject: [PATCH 455/762] ssh: Add session state indicator to title bar (#18645) ![image](https://github.com/user-attachments/assets/0ed6f59c-e0e7-49e6-8db7-f09ec5cdf653) The indicator turns yellow when ssh client is trying to reconnect. Note that the state tracking is probably not ideal (we'll see how it pans out once we start dog-fooding), but at the very least "green=good" should be a decent mental model for now. Release Notes: - N/A --- crates/project/src/project.rs | 9 +++- crates/recent_projects/src/ssh_connections.rs | 1 + crates/remote/src/ssh_session.rs | 16 +++++-- crates/title_bar/src/title_bar.rs | 45 ++++++++++++++++++- 4 files changed, 66 insertions(+), 5 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index dadbd394bbf9b5..59c2c895cddcb6 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1217,7 +1217,10 @@ impl Project { server.ssh_connection_string.is_some() } - pub fn ssh_connection_string(&self, cx: &ModelContext) -> Option { + pub fn ssh_connection_string(&self, cx: &AppContext) -> Option { + if let Some(ssh_state) = &self.ssh_client { + return Some(ssh_state.connection_string().into()); + } let dev_server_id = self.dev_server_project_id()?; dev_server_projects::Store::global(cx) .read(cx) @@ -1226,6 +1229,10 @@ impl Project { .clone() } + pub fn ssh_is_connected(&self) -> Option { + Some(!self.ssh_client.as_ref()?.is_reconnect_underway()) + } + pub fn replica_id(&self) -> ReplicaId { match self.client_state { ProjectClientState::Remote { replica_id, .. 
} => replica_id, diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index d0fffc031f0bff..1aff16a4a44f71 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -317,6 +317,7 @@ impl SshClientDelegate { if release_channel == ReleaseChannel::Dev && platform.arch == std::env::consts::ARCH && platform.os == std::env::consts::OS + && false { use smol::process::{Command, Stdio}; diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 6bca9938baac71..89ec5db949aa70 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -36,6 +36,7 @@ use std::{ time::Instant, }; use tempfile::TempDir; +use util::maybe; #[derive( Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, @@ -48,7 +49,7 @@ pub struct SshSocket { socket_path: PathBuf, } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct SshConnectionOptions { pub host: String, pub username: Option, @@ -250,6 +251,7 @@ struct SshRemoteClientState { pub struct SshRemoteClient { client: Arc, inner_state: Mutex>, + connection_options: SshConnectionOptions, } impl SshRemoteClient { @@ -265,6 +267,7 @@ impl SshRemoteClient { let this = Arc::new(Self { client, inner_state: Mutex::new(None), + connection_options: connection_options.clone(), }); let inner_state = { @@ -272,8 +275,7 @@ impl SshRemoteClient { ChannelForwarder::new(incoming_tx, outgoing_rx, cx); let (ssh_connection, ssh_process) = - Self::establish_connection(connection_options.clone(), delegate.clone(), cx) - .await?; + Self::establish_connection(connection_options, delegate.clone(), cx).await?; let multiplex_task = Self::multiplex( Arc::downgrade(&this), @@ -505,6 +507,13 @@ impl SshRemoteClient { self.client.clone().into() } + pub fn connection_string(&self) -> String { + 
self.connection_options.connection_string() + } + + pub fn is_reconnect_underway(&self) -> bool { + maybe!({ Some(self.inner_state.try_lock()?.is_none()) }).unwrap_or_default() + } #[cfg(any(test, feature = "test-support"))] pub fn fake( client_cx: &mut gpui::TestAppContext, @@ -519,6 +528,7 @@ impl SshRemoteClient { Arc::new(Self { client, inner_state: Mutex::new(None), + connection_options: SshConnectionOptions::default(), }) }), server_cx.update(|cx| ChannelClient::new(client_to_server_rx, server_to_client_tx, cx)), diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index d6cc839cfdb7bc..81f908ce797902 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -18,7 +18,7 @@ use gpui::{ StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView, }; use project::{Project, RepositoryEntry}; -use recent_projects::RecentProjects; +use recent_projects::{OpenRemote, RecentProjects}; use rpc::proto::{self, DevServerStatus}; use smallvec::SmallVec; use std::sync::Arc; @@ -262,6 +262,46 @@ impl TitleBar { self } + fn render_ssh_project_host(&self, cx: &mut ViewContext) -> Option { + let host = self.project.read(cx).ssh_connection_string(cx)?; + let meta = SharedString::from(format!("Connected to: {host}")); + let indicator_color = if self.project.read(cx).ssh_is_connected()? 
{ + Color::Success + } else { + Color::Warning + }; + let indicator = div() + .absolute() + .w_1_4() + .h_1_4() + .right_0p5() + .bottom_0p5() + .p_1() + .rounded_2xl() + .bg(indicator_color.color(cx)); + + Some( + div() + .child( + IconButton::new("ssh-server-icon", IconName::Server) + .tooltip(move |cx| { + Tooltip::with_meta( + "Remote Project", + Some(&OpenRemote), + meta.clone(), + cx, + ) + }) + .shape(ui::IconButtonShape::Square) + .on_click(|_, cx| { + cx.dispatch_action(OpenRemote.boxed_clone()); + }), + ) + .child(indicator) + .into_any_element(), + ) + } + pub fn render_project_host(&self, cx: &mut ViewContext) -> Option { if let Some(dev_server) = self.project @@ -296,6 +336,9 @@ impl TitleBar { .into_any_element(), ); } + if self.project.read(cx).is_via_ssh() { + return self.render_ssh_project_host(cx); + } if self.project.read(cx).is_disconnected() { return Some( From c48d4dbc6bad8127d6992fea3fbf4c3091dc9650 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 2 Oct 2024 22:06:07 -0400 Subject: [PATCH 456/762] Add basic outline panel docs (#18674) Bandaid to: https://github.com/zed-industries/zed/issues/18672 Release Notes: - Added basic outline panel docs --- docs/src/SUMMARY.md | 1 + docs/src/outline-panel.md | 26 ++++++++++++++++++++++++++ 2 files changed, 27 insertions(+) create mode 100644 docs/src/outline-panel.md diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index f0e4784f89cd92..e7d837e479bce1 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -22,6 +22,7 @@ # Using Zed - [Multibuffers](./multibuffers.md) +- [Outline Panel](./outline-panel.md) - [Code Completions](./completions.md) - [Channels](./channels.md) - [Collaboration](./collaboration.md) diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md new file mode 100644 index 00000000000000..ee654514b25c2c --- /dev/null +++ b/docs/src/outline-panel.md @@ -0,0 +1,26 @@ +# Outline Panel + +In addition to the modal outline (`cmd-shift-o`), Zed offers an 
outline panel. The outline panel can be deployed via `cmd-shift-b`, or via the `Outline Panel` button in the status bar. + +When viewing a "singleton" buffer, the outline panel works similarly to that of the outline modal - it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. + +![Using the outline panel in a singleton buffer](https://zed.dev/img/outline-panel/singleton.png) + +The outline panel truly excels when used with multi-buffers. Here are some examples of its versatility: + +1. Project Search Results: + +Get an overview of search results across your project. +![Using the outline panel in a project search multi-buffer](https://zed.dev/img/outline-panel/project-search.png) + +2. Project Diagnostics: + +View a summary of all errors and warnings reported by the language server. +![Using the outline panel while viewing project diagnostics multi-buffer](https://zed.dev/img/outline-panel/project-diagnostics.png) + +3. Find All References: + +Quickly navigate through all references when using the `editor: find all references` action. +![Using the outline panel while viewing `find all references` multi-buffer](https://zed.dev/img/outline-panel/find-all-references.png) + +The outline view provides a great way to quickly navigate to specific parts of your code and helps you maintain context when working with large result sets in multi-buffers. From df21fe174d91cd15ee984de3c78f018b347ad8e9 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Wed, 2 Oct 2024 22:16:56 -0400 Subject: [PATCH 457/762] Add command palette action name to outline panel docs (#18678) Release Notes: - N/A --- docs/src/outline-panel.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md index ee654514b25c2c..998f310076f01b 100644 --- a/docs/src/outline-panel.md +++ b/docs/src/outline-panel.md @@ -1,6 +1,6 @@ # Outline Panel -In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. The outline panel can be deployed via `cmd-shift-b`, or via the `Outline Panel` button in the status bar. +In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. The outline panel can be deployed via `cmd-shift-b` (`outline panel: toggle focus` via the command palette), or by clicking the `Outline Panel` button in the status bar. When viewing a "singleton" buffer, the outline panel works similarly to that of the outline modal - it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. From 9cd42427d88afad3423fa2546ed656839295cf3f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 23:28:00 -0400 Subject: [PATCH 458/762] Update Rust crate thiserror to v1.0.64 (#18677) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [thiserror](https://redirect.github.com/dtolnay/thiserror) | workspace.dependencies | patch | `1.0.63` -> `1.0.64` | --- ### Release Notes
dtolnay/thiserror (thiserror) ### [`v1.0.64`](https://redirect.github.com/dtolnay/thiserror/releases/tag/1.0.64) [Compare Source](https://redirect.github.com/dtolnay/thiserror/compare/1.0.63...1.0.64) - Exclude derived impls from coverage instrumentation ([#​322](https://redirect.github.com/dtolnay/thiserror/issues/322), thanks [@​oxalica](https://redirect.github.com/oxalica))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 187d0c92c806c2..821bedbec040f8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11702,18 +11702,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" dependencies = [ "proc-macro2", "quote", From 1e8297a469a4c922c23d6c6912d13832a4a09b4b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 3 Oct 2024 15:38:42 +0300 Subject: [PATCH 459/762] Remove a debug dev config line (#18689) Follow-up of https://github.com/zed-industries/zed/pull/18645 Release Notes: - N/A --- crates/recent_projects/src/ssh_connections.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 1aff16a4a44f71..d0fffc031f0bff 100644 
--- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -317,7 +317,6 @@ impl SshClientDelegate { if release_channel == ReleaseChannel::Dev && platform.arch == std::env::consts::ARCH && platform.os == std::env::consts::OS - && false { use smol::process::{Command, Stdio}; From dc85378b9679253c03d5a11a8c5f0f3ae0d641d7 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 3 Oct 2024 16:23:56 +0200 Subject: [PATCH 460/762] Clean up style properties on hunk controls (#18639) This PR removes some duplicate style properties on the hunk controls, namely padding, border, and background color. Release Notes: - N/A --- crates/editor/src/hunk_diff.rs | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index ff3451fc9216b0..7fbb07ae35c79e 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -379,6 +379,7 @@ impl Editor { }); let border_color = cx.theme().colors().border_variant; + let bg_color = cx.theme().colors().editor_background; let gutter_color = match hunk.status { DiffHunkStatus::Added => cx.theme().status().created, DiffHunkStatus::Modified => cx.theme().status().modified, @@ -394,6 +395,7 @@ impl Editor { render: Box::new({ let editor = cx.view().clone(); let hunk = hunk.clone(); + move |cx| { let hunk_controls_menu_handle = editor.read(cx).hunk_controls_menu_handle.clone(); @@ -404,7 +406,7 @@ impl Editor { .w_full() .border_t_1() .border_color(border_color) - .bg(cx.theme().colors().editor_background) + .bg(bg_color) .child( div() .id("gutter-strip") @@ -424,14 +426,9 @@ impl Editor { ) .child( h_flex() - .pl_2() - .pr_6() + .px_6() .size_full() .justify_between() - .border_t_1() - .pl_6() - .pr_6() - .border_color(border_color) .child( h_flex() .gap_1() @@ -608,7 +605,7 @@ impl Editor { move |menu, _| { menu.context(focus.clone()) .action( - 
"Discard All", + "Discard All Hunks", RevertFile .boxed_clone(), ) From 773ad6bfd154b9f14b9b2b7009fd5d53926f9e3f Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Thu, 3 Oct 2024 10:27:19 -0400 Subject: [PATCH 461/762] Document the `theme` crate (#18690) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR enables required documentation for the `theme` crate starts on documenting it. The end goal is to have all meaningful documentation in the crate filled out – However I'm not sure that just adding `#![deny(missing_docs)]` to the whole crate is the right approach. I don't know that having 200+ "The color of the _ color" field docs is useful however–In the short term I've excluded some of the modules that contain structs with a ton of fields (`colors, `status`, etc.) until we decide what the right solution here is. Next steps are to clean up the crate, removing unused modules or those with low usage in favor of other approaches. Changes in this PR: - Enable the `deny(missing_docs)` lint for the `theme` crate - Start documenting a subset of the crate. - Enable `#![allow(missing_docs)]` for some modules. 
Release Notes: - N/A --- crates/theme/src/default_colors.rs | 15 +++-- crates/theme/src/default_theme.rs | 3 + crates/theme/src/font_family_cache.rs | 3 + crates/theme/src/registry.rs | 11 ++++ crates/theme/src/scale.rs | 1 + crates/theme/src/schema.rs | 2 + crates/theme/src/settings.rs | 70 +++++++++++++++++++++- crates/theme/src/styles/accents.rs | 3 + crates/theme/src/styles/colors.rs | 2 + crates/theme/src/styles/players.rs | 2 + crates/theme/src/styles/status.rs | 2 + crates/theme/src/styles/stories/color.rs | 1 + crates/theme/src/styles/stories/players.rs | 1 + crates/theme/src/styles/syntax.rs | 2 + crates/theme/src/styles/system.rs | 2 + crates/theme/src/theme.rs | 41 ++++++++++++- 16 files changed, 152 insertions(+), 9 deletions(-) diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index a7521bd374d1c3..49c216c0e07e78 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -8,12 +8,13 @@ pub(crate) fn neutral() -> ColorScaleSet { sand() } -// Note: We aren't currently making use of the default colors, as all of the -// themes have a value set for each color. -// -// We'll need to revisit these once we're ready to launch user themes, which may -// not specify a value for each color (and thus should fall back to the defaults). +/// The default colors for the theme. +/// +/// Themes that do not specify all colors are refined off of these defaults. impl ThemeColors { + /// Returns the default colors for light themes. + /// + /// Themes that do not specify all colors are refined off of these defaults. pub fn light() -> Self { let system = SystemColors::default(); @@ -114,6 +115,9 @@ impl ThemeColors { } } + /// Returns the default colors for dark themes. + /// + /// Themes that do not specify all colors are refined off of these defaults. 
pub fn dark() -> Self { let system = SystemColors::default(); @@ -247,6 +251,7 @@ impl TryFrom for ColorScaleSet { } } +/// Color scales used to build the default themes. pub fn default_color_scales() -> ColorScales { ColorScales { gray: gray(), diff --git a/crates/theme/src/default_theme.rs b/crates/theme/src/default_theme.rs index 6722b847badb96..58063409165330 100644 --- a/crates/theme/src/default_theme.rs +++ b/crates/theme/src/default_theme.rs @@ -45,6 +45,9 @@ pub(crate) fn zed_pro_moonlight() -> Theme { } } +/// Returns the Zed Pro theme family. +/// +/// Note: To be removed until the theme is implemented. pub fn zed_pro_family() -> ThemeFamily { ThemeFamily { id: "zed_pro".to_string(), diff --git a/crates/theme/src/font_family_cache.rs b/crates/theme/src/font_family_cache.rs index c9583b9e8d3bc7..755d829902a48f 100644 --- a/crates/theme/src/font_family_cache.rs +++ b/crates/theme/src/font_family_cache.rs @@ -25,14 +25,17 @@ struct GlobalFontFamilyCache(Arc); impl Global for GlobalFontFamilyCache {} impl FontFamilyCache { + /// Initializes the global font family cache. pub fn init_global(cx: &mut AppContext) { cx.default_global::(); } + /// Returns the global font family cache. pub fn global(cx: &AppContext) -> Arc { GlobalFontFamilyCache::global(cx).0.clone() } + /// Returns the list of font families. pub fn list_font_families(&self, cx: &AppContext) -> Vec { if self.state.read().loaded_at.is_some() { return self.state.read().font_families.clone(); diff --git a/crates/theme/src/registry.rs b/crates/theme/src/registry.rs index b70377f4408b98..a77ab141a6547b 100644 --- a/crates/theme/src/registry.rs +++ b/crates/theme/src/registry.rs @@ -17,9 +17,12 @@ use crate::{ ThemeStyles, }; +/// The metadata for a theme. #[derive(Debug, Clone)] pub struct ThemeMeta { + /// The name of the theme. pub name: SharedString, + /// The appearance of the theme. 
pub appearance: Appearance, } @@ -38,6 +41,7 @@ struct ThemeRegistryState { themes: HashMap>, } +/// The registry for themes. pub struct ThemeRegistry { state: RwLock, assets: Box, @@ -61,6 +65,7 @@ impl ThemeRegistry { cx.set_global(GlobalThemeRegistry(Arc::new(ThemeRegistry::new(assets)))); } + /// Creates a new [`ThemeRegistry`] with the given [`AssetSource`]. pub fn new(assets: Box) -> Self { let registry = Self { state: RwLock::new(ThemeRegistryState { @@ -99,6 +104,7 @@ impl ThemeRegistry { } } + /// Inserts user themes into the registry. pub fn insert_user_themes(&self, themes: impl IntoIterator) { self.insert_themes(themes.into_iter().map(|user_theme| { let mut theme_colors = match user_theme.appearance { @@ -185,16 +191,19 @@ impl ThemeRegistry { .retain(|name, _| !themes_to_remove.contains(name)) } + /// Removes all themes from the registry. pub fn clear(&mut self) { self.state.write().themes.clear(); } + /// Returns the names of all themes in the registry. pub fn list_names(&self, _staff: bool) -> Vec { let mut names = self.state.read().themes.keys().cloned().collect::>(); names.sort(); names } + /// Returns the metadata of all themes in the registry. pub fn list(&self, _staff: bool) -> Vec { self.state .read() @@ -207,6 +216,7 @@ impl ThemeRegistry { .collect() } + /// Returns the theme with the given name. pub fn get(&self, name: &str) -> Result> { self.state .read() @@ -261,6 +271,7 @@ impl ThemeRegistry { Ok(()) } + /// Asynchronously reads the user theme from the specified path. 
pub async fn read_user_theme(theme_path: &Path, fs: Arc) -> Result { let reader = fs.open_sync(theme_path).await?; let theme_family: ThemeFamilyContent = serde_json_lenient::from_reader(reader)?; diff --git a/crates/theme/src/scale.rs b/crates/theme/src/scale.rs index 1146090edcc1e7..a70dcb9789a3c8 100644 --- a/crates/theme/src/scale.rs +++ b/crates/theme/src/scale.rs @@ -1,3 +1,4 @@ +#![allow(missing_docs)] use gpui::{AppContext, Hsla, SharedString}; use crate::{ActiveTheme, Appearance}; diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index 91863061236f24..af334d8aed54b9 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use anyhow::Result; use gpui::{FontStyle, FontWeight, HighlightStyle, Hsla, WindowBackgroundAppearance}; use indexmap::IndexMap; diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 86383cec8ea079..0c8ea782cd6081 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -35,6 +35,9 @@ const MIN_LINE_HEIGHT: f32 = 1.0; Deserialize, JsonSchema, )] + +/// Specifies the density of the UI. +/// Note: This setting is still experimental. See [this tracking issue](https://github.com/zed-industries/zed/issues/18078) #[serde(rename_all = "snake_case")] pub enum UiDensity { /// A denser UI with tighter spacing and smaller elements. @@ -50,6 +53,8 @@ pub enum UiDensity { } impl UiDensity { + /// The spacing ratio of a given density. + /// TODO: Standardize usage throughout the app or remove pub fn spacing_ratio(self) -> f32 { match self { UiDensity::Compact => 0.75, @@ -80,17 +85,43 @@ impl From for String { } } +/// Customizable settings for the UI and theme system. #[derive(Clone)] pub struct ThemeSettings { + /// The UI font size. Determines the size of text in the UI, + /// as well as the size of a [gpui::Rems] unit. + /// + /// Changing this will impact the size of all UI elements. 
pub ui_font_size: Pixels, + /// The font used for UI elements. pub ui_font: Font, - pub buffer_font: Font, + /// The font size used for buffers, and the terminal. + /// + /// The terminal font size can be overridden using it's own setting. pub buffer_font_size: Pixels, + /// The font used for buffers, and the terminal. + /// + /// The terminal font family can be overridden using it's own setting. + pub buffer_font: Font, + /// The line height for buffers, and the terminal. + /// + /// Changing this may affect the spacing of some UI elements. + /// + /// The terminal font family can be overridden using it's own setting. pub buffer_line_height: BufferLineHeight, + /// The current theme selection. + /// TODO: Document this further pub theme_selection: Option, + /// The active theme. pub active_theme: Arc, + /// Manual overrides for the active theme. + /// + /// Note: This setting is still experimental. See [this tracking issue](https://github.com/zed-industries/zed/issues/18078) pub theme_overrides: Option, + /// The density of the UI. + /// Note: This setting is still experimental. See [this tracking issue]( pub ui_density: UiDensity, + /// The amount of fading applied to unnecessary code. pub unnecessary_code_fade: f32, } @@ -181,15 +212,21 @@ pub(crate) struct AdjustedUiFontSize(Pixels); impl Global for AdjustedUiFontSize {} +/// Represents the selection of a theme, which can be either static or dynamic. #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] #[serde(untagged)] pub enum ThemeSelection { + /// A static theme selection, represented by a single theme name. Static(#[schemars(schema_with = "theme_name_ref")] String), + /// A dynamic theme selection, which can change based the [ThemeMode]. Dynamic { + /// The mode used to determine which theme to use. #[serde(default)] mode: ThemeMode, + /// The theme to use for light mode. #[schemars(schema_with = "theme_name_ref")] light: String, + /// The theme to use for dark mode. 
#[schemars(schema_with = "theme_name_ref")] dark: String, }, @@ -199,6 +236,12 @@ fn theme_name_ref(_: &mut SchemaGenerator) -> Schema { Schema::new_ref("#/definitions/ThemeName".into()) } +// TODO: Rename ThemeMode -> ThemeAppearanceMode +/// The mode use to select a theme. +/// +/// `Light` and `Dark` will select their respective themes. +/// +/// `System` will select the theme based on the system's appearance. #[derive(Debug, PartialEq, Eq, Clone, Copy, Default, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum ThemeMode { @@ -214,6 +257,7 @@ pub enum ThemeMode { } impl ThemeSelection { + /// Returns the theme name for the selected [ThemeMode]. pub fn theme(&self, system_appearance: Appearance) -> &str { match self { Self::Static(theme) => theme, @@ -228,6 +272,7 @@ impl ThemeSelection { } } + /// Returns the [ThemeMode] for the [ThemeSelection]. pub fn mode(&self) -> Option { match self { ThemeSelection::Static(_) => None, @@ -327,6 +372,7 @@ impl ThemeSettingsContent { } } + /// Sets the mode for the theme. pub fn set_mode(&mut self, mode: ThemeMode) { if let Some(selection) = self.theme.as_mut() { match selection { @@ -355,16 +401,23 @@ impl ThemeSettingsContent { } } +/// The buffer's line height. #[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)] #[serde(rename_all = "snake_case")] pub enum BufferLineHeight { + /// A less dense line height. #[default] Comfortable, + /// The default line height. Standard, + /// A custom line height. + /// + /// A line height of 1.0 is the height of the buffer's font size. Custom(f32), } impl BufferLineHeight { + /// Returns the value of the line height. pub fn value(&self) -> f32 { match self { BufferLineHeight::Comfortable => 1.618, @@ -375,12 +428,15 @@ impl BufferLineHeight { } impl ThemeSettings { + /// Returns the [AdjustedBufferFontSize]. 
pub fn buffer_font_size(&self, cx: &AppContext) -> Pixels { cx.try_global::() .map_or(self.buffer_font_size, |size| size.0) .max(MIN_FONT_SIZE) } + // TODO: Rename: `line_height` -> `buffer_line_height` + /// Returns the buffer's line height. pub fn line_height(&self) -> f32 { f32::max(self.buffer_line_height.value(), MIN_LINE_HEIGHT) } @@ -433,6 +489,7 @@ impl ThemeSettings { } } +/// Observe changes to the adjusted buffer font size. pub fn observe_buffer_font_size_adjustment( cx: &mut ViewContext, f: impl 'static + Fn(&mut V, &mut ViewContext), @@ -440,6 +497,7 @@ pub fn observe_buffer_font_size_adjustment( cx.observe_global::(f) } +/// Sets the adjusted buffer font size. pub fn adjusted_font_size(size: Pixels, cx: &mut AppContext) -> Pixels { if let Some(AdjustedBufferFontSize(adjusted_size)) = cx.try_global::() { let buffer_font_size = ThemeSettings::get_global(cx).buffer_font_size; @@ -451,12 +509,14 @@ pub fn adjusted_font_size(size: Pixels, cx: &mut AppContext) -> Pixels { .max(MIN_FONT_SIZE) } +/// Returns the adjusted buffer font size. pub fn get_buffer_font_size(cx: &AppContext) -> Pixels { let buffer_font_size = ThemeSettings::get_global(cx).buffer_font_size; cx.try_global::() .map_or(buffer_font_size, |adjusted_size| adjusted_size.0) } +/// Adjusts the buffer font size. pub fn adjust_buffer_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { let buffer_font_size = ThemeSettings::get_global(cx).buffer_font_size; let mut adjusted_size = cx @@ -469,10 +529,12 @@ pub fn adjust_buffer_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { cx.refresh(); } +/// Returns whether the buffer font size has been adjusted. pub fn has_adjusted_buffer_font_size(cx: &mut AppContext) -> bool { cx.has_global::() } +/// Resets the buffer font size to the default value. 
pub fn reset_buffer_font_size(cx: &mut AppContext) { if cx.has_global::() { cx.remove_global::(); @@ -480,6 +542,8 @@ pub fn reset_buffer_font_size(cx: &mut AppContext) { } } +// TODO: Make private, change usages to use `get_ui_font_size` instead. +#[allow(missing_docs)] pub fn setup_ui_font(cx: &mut WindowContext) -> gpui::Font { let (ui_font, ui_font_size) = { let theme_settings = ThemeSettings::get_global(cx); @@ -491,12 +555,14 @@ pub fn setup_ui_font(cx: &mut WindowContext) -> gpui::Font { ui_font } +/// Gets the adjusted UI font size. pub fn get_ui_font_size(cx: &AppContext) -> Pixels { let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; cx.try_global::() .map_or(ui_font_size, |adjusted_size| adjusted_size.0) } +/// Sets the adjusted UI font size. pub fn adjust_ui_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { let ui_font_size = ThemeSettings::get_global(cx).ui_font_size; let mut adjusted_size = cx @@ -509,10 +575,12 @@ pub fn adjust_ui_font_size(cx: &mut AppContext, f: fn(&mut Pixels)) { cx.refresh(); } +/// Returns whether the UI font size has been adjusted. pub fn has_adjusted_ui_font_size(cx: &mut AppContext) -> bool { cx.has_global::() } +/// Resets the UI font size to the default value. pub fn reset_ui_font_size(cx: &mut AppContext) { if cx.has_global::() { cx.remove_global::(); diff --git a/crates/theme/src/styles/accents.rs b/crates/theme/src/styles/accents.rs index dfcd19911b26f6..e4d7f03cf62201 100644 --- a/crates/theme/src/styles/accents.rs +++ b/crates/theme/src/styles/accents.rs @@ -20,6 +20,7 @@ impl Default for AccentColors { } impl AccentColors { + /// Returns the set of dark accent colors. pub fn dark() -> Self { Self(vec![ blue().dark().step_9(), @@ -38,6 +39,7 @@ impl AccentColors { ]) } + /// Returns the set of light accent colors. pub fn light() -> Self { Self(vec![ blue().light().step_9(), @@ -58,6 +60,7 @@ impl AccentColors { } impl AccentColors { + /// Returns the color for the given index. 
pub fn color_for_index(&self, index: u32) -> Hsla { self.0[index as usize % self.0.len()] } diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 225275f37b6191..881a68334dcf64 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use gpui::{Hsla, WindowBackgroundAppearance}; use refineable::Refineable; use std::sync::Arc; diff --git a/crates/theme/src/styles/players.rs b/crates/theme/src/styles/players.rs index e80c7161b15b12..130721033239ce 100644 --- a/crates/theme/src/styles/players.rs +++ b/crates/theme/src/styles/players.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use gpui::Hsla; use serde_derive::Deserialize; diff --git a/crates/theme/src/styles/status.rs b/crates/theme/src/styles/status.rs index 854b876ac20b33..84afae701d0f09 100644 --- a/crates/theme/src/styles/status.rs +++ b/crates/theme/src/styles/status.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use gpui::Hsla; use refineable::Refineable; diff --git a/crates/theme/src/styles/stories/color.rs b/crates/theme/src/styles/stories/color.rs index 90e84bcf0f2ded..8e6c86ba762ba8 100644 --- a/crates/theme/src/styles/stories/color.rs +++ b/crates/theme/src/styles/stories/color.rs @@ -4,6 +4,7 @@ use story::Story; use crate::{default_color_scales, ColorScaleStep}; +/// The story showcasing all the default color scales pub struct ColorsStory; impl Render for ColorsStory { diff --git a/crates/theme/src/styles/stories/players.rs b/crates/theme/src/styles/stories/players.rs index 2b356670bf66a6..0d50c6edc9afcd 100644 --- a/crates/theme/src/styles/stories/players.rs +++ b/crates/theme/src/styles/stories/players.rs @@ -3,6 +3,7 @@ use story::Story; use crate::{ActiveTheme, PlayerColors}; +/// The story showcasing the player colors pub struct PlayerStory; impl Render for PlayerStory { diff --git a/crates/theme/src/styles/syntax.rs b/crates/theme/src/styles/syntax.rs index 8016445c16c6af..0a97ff77f23c8e 
100644 --- a/crates/theme/src/styles/syntax.rs +++ b/crates/theme/src/styles/syntax.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use std::sync::Arc; use gpui::{HighlightStyle, Hsla}; diff --git a/crates/theme/src/styles/system.rs b/crates/theme/src/styles/system.rs index aeb0865155d68a..54e892b79c49f9 100644 --- a/crates/theme/src/styles/system.rs +++ b/crates/theme/src/styles/system.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use gpui::{hsla, Hsla}; #[derive(Clone)] diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index af38c9efc6f7ec..a6ca59d734e245 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -1,3 +1,5 @@ +#![deny(missing_docs)] + //! # Theme //! //! This crate provides the theme system for Zed. @@ -10,6 +12,9 @@ mod default_colors; mod default_theme; mod font_family_cache; mod one_themes; +/// A prelude for working with the theme system. +/// +/// TODO: remove this. This only publishes default colors. pub mod prelude; mod registry; mod scale; @@ -35,16 +40,22 @@ use gpui::{ }; use serde::Deserialize; +/// Defines window border radius for platforms that use client side decorations. +pub const CLIENT_SIDE_DECORATION_ROUNDING: Pixels = px(10.0); +/// Defines window shadow size for platforms that use client side decorations. +pub const CLIENT_SIDE_DECORATION_SHADOW: Pixels = px(10.0); + +/// The appearance of the theme. #[derive(Debug, PartialEq, Clone, Copy, Deserialize)] pub enum Appearance { + /// A light appearance. Light, + /// A dark appearance. Dark, } -pub const CLIENT_SIDE_DECORATION_ROUNDING: Pixels = px(10.0); -pub const CLIENT_SIDE_DECORATION_SHADOW: Pixels = px(10.0); - impl Appearance { + /// Returns whether the appearance is light. pub fn is_light(&self) -> bool { match self { Self::Light => true, @@ -62,6 +73,7 @@ impl From for Appearance { } } +/// Which themes should be loaded. This is used primarlily for testing. pub enum LoadThemes { /// Only load the base theme. 
/// @@ -72,6 +84,7 @@ pub enum LoadThemes { All(Box), } +/// Initialize the theme system. pub fn init(themes_to_load: LoadThemes, cx: &mut AppContext) { let (assets, load_user_themes) = match themes_to_load { LoadThemes::JustBase => (Box::new(()) as Box, false), @@ -97,7 +110,9 @@ pub fn init(themes_to_load: LoadThemes, cx: &mut AppContext) { .detach(); } +/// Implementing this trait allows accessing the active theme. pub trait ActiveTheme { + /// Returns the active theme. fn theme(&self) -> &Arc; } @@ -107,21 +122,39 @@ impl ActiveTheme for AppContext { } } +/// A theme family is a grouping of themes under a single name. +/// +/// For example, the "One" theme family contains the "One Light" and "One Dark" themes. +/// +/// It can also be used to package themes with many variants. +/// +/// For example, the "Atelier" theme family contains "Cave", "Dune", "Estuary", "Forest", "Heath", etc. pub struct ThemeFamily { + /// The unique identifier for the theme family. pub id: String, + /// The name of the theme family. This will be displayed in the UI, such as when adding or removing a theme family. pub name: SharedString, + /// The author of the theme family. pub author: SharedString, + /// The [Theme]s in the family. pub themes: Vec, + /// The color scales used by the themes in the family. + /// Note: This will be removed in the future. pub scales: ColorScales, } impl ThemeFamily {} +/// A theme is the primary mechanism for defining the appearance of the UI. #[derive(Clone)] pub struct Theme { + /// The unique identifier for the theme. pub id: String, + /// The name of the theme. pub name: SharedString, + /// The appearance of the theme (light or dark). pub appearance: Appearance, + /// The colors and other styles for the theme. pub styles: ThemeStyles, } @@ -181,6 +214,8 @@ impl Theme { } } +/// Compounds a color with an alpha value. +/// TODO: Replace this with a method on Hsla. 
pub fn color_alpha(color: Hsla, alpha: f32) -> Hsla { let mut color = color; color.a = alpha; From 29796aa4128f917c8e3056d04f3801b2eac657d6 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 11:14:22 -0400 Subject: [PATCH 462/762] Update Rust crate serde_json to v1.0.128 (#18669) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [serde_json](https://redirect.github.com/serde-rs/json) | dependencies | patch | `1.0.127` -> `1.0.128` | | [serde_json](https://redirect.github.com/serde-rs/json) | workspace.dependencies | patch | `1.0.127` -> `1.0.128` | --- ### Release Notes
serde-rs/json (serde_json) ### [`v1.0.128`](https://redirect.github.com/serde-rs/json/releases/tag/1.0.128) [Compare Source](https://redirect.github.com/serde-rs/json/compare/1.0.127...1.0.128) - Support serializing maps containing 128-bit integer keys to serde_json::Value ([#​1188](https://redirect.github.com/serde-rs/json/issues/1188), thanks [@​Mrreadiness](https://redirect.github.com/Mrreadiness))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about these updates again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 821bedbec040f8..a94de65bc9896c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10194,9 +10194,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.127" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ "indexmap 2.4.0", "itoa", From ddcd45bb457215afcd74f3d2bf995c7687604ff1 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 3 Oct 2024 17:27:42 +0200 Subject: [PATCH 463/762] docs: Add tweaks to the outline panel page (#18697) Thought we could be extra clear here with the meaning of "singleton buffers". Release Notes: - N/A --- docs/src/outline-panel.md | 13 +++++++++---- docs/theme/css/general.css | 1 + docs/theme/css/variables.css | 4 ++++ 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md index 998f310076f01b..bc743596d6bcb1 100644 --- a/docs/src/outline-panel.md +++ b/docs/src/outline-panel.md @@ -2,25 +2,30 @@ In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. 
The outline panel can be deployed via `cmd-shift-b` (`outline panel: toggle focus` via the command palette), or by clicking the `Outline Panel` button in the status bar. -When viewing a "singleton" buffer, the outline panel works similarly to that of the outline modal - it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. +When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal-it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. ![Using the outline panel in a singleton buffer](https://zed.dev/img/outline-panel/singleton.png) +## Usage with multibuffers + The outline panel truly excels when used with multi-buffers. Here are some examples of its versatility: -1. Project Search Results: +### Project Search Results Get an overview of search results across your project. + ![Using the outline panel in a project search multi-buffer](https://zed.dev/img/outline-panel/project-search.png) -2. Project Diagnostics: +### Project Diagnostics View a summary of all errors and warnings reported by the language server. + ![Using the outline panel while viewing project diagnostics multi-buffer](https://zed.dev/img/outline-panel/project-diagnostics.png) -3. Find All References: +### Find All References Quickly navigate through all references when using the `editor: find all references` action. 
+ ![Using the outline panel while viewing `find all references` multi-buffer](https://zed.dev/img/outline-panel/find-all-references.png) The outline view provides a great way to quickly navigate to specific parts of your code and helps you maintain context when working with large result sets in multi-buffers. diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index b422890751c508..d1b8e9b92653e7 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -182,6 +182,7 @@ h6:target::before { .content img, .content video { max-width: 100%; + background-color: var(--media-bg); border: 1px solid; border-color: var(--border); border-radius: 8px; diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css index a7c0ed7114b405..55ae4a427da269 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -26,6 +26,8 @@ --border-light: hsl(220, 13%, 90%); --border-hover: hsl(220, 13%, 70%); + --media-bg: hsl(50, 25%, 92%); + --sidebar-fg: hsl(0, 0%, 0%); --sidebar-non-existant: #aaaaaa; --sidebar-active: hsl(220, 93%, 42%); @@ -98,6 +100,8 @@ --border-light: hsl(220, 13%, 90%); --border-hover: hsl(220, 13%, 40%); + --media-bg: hsl(220, 13%, 8%); + --sidebar-bg: hsl(220, 13%, 10%); --sidebar-fg: hsl(220, 14%, 71%); --sidebar-non-existant: #505254; From ded3d3fc14a438f606d5e8b03eaf92310723fce4 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 11:29:29 -0400 Subject: [PATCH 464/762] Update Python to v3.12.7 (#18652) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [python](https://redirect.github.com/containerbase/python-prebuild) | dependencies | patch | `3.12.6` -> `3.12.7` | --- ### Release Notes
containerbase/python-prebuild (python) ### [`v3.12.7`](https://redirect.github.com/containerbase/python-prebuild/releases/tag/3.12.7) [Compare Source](https://redirect.github.com/containerbase/python-prebuild/compare/3.12.6...3.12.7) ##### Bug Fixes - **deps:** update dependency python to v3.12.7
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- script/update_top_ranking_issues/poetry.lock | 4 ++-- script/update_top_ranking_issues/pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/script/update_top_ranking_issues/poetry.lock b/script/update_top_ranking_issues/poetry.lock index a85844e645f100..c151c8bc1ddd74 100644 --- a/script/update_top_ranking_issues/poetry.lock +++ b/script/update_top_ranking_issues/poetry.lock @@ -529,5 +529,5 @@ files = [ [metadata] lock-version = "2.0" -python-versions = "3.12.6" -content-hash = "7827704e06a8c195297507e0d05e7a7c3843ed299bd353f31570ee4c435c6896" +python-versions = "3.12.7" +content-hash = "809bd421af8a34dd500ba704d954ae8e1f6edf15b6af74a0d3fda987b69c8cbe" diff --git a/script/update_top_ranking_issues/pyproject.toml b/script/update_top_ranking_issues/pyproject.toml index 15d8346bb99103..cd5cd2cc2ed270 100644 --- a/script/update_top_ranking_issues/pyproject.toml +++ b/script/update_top_ranking_issues/pyproject.toml @@ -8,7 +8,7 @@ readme = "README.md" [tool.poetry.dependencies] mypy = "1.6.0" PyGithub = "1.55" -python = "3.12.6" +python = "3.12.7" pytz = "2022.1" typer = "0.9.0" types-pytz = "2023.3.1.1" From f7b3680e4dbd664dc72344f67b957907260bc2d7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 11:32:04 -0400 Subject: [PATCH 465/762] Update Rust crate pretty_assertions to v1.4.1 (#18668) 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pretty_assertions](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions) | workspace.dependencies | patch | `1.4.0` -> `1.4.1` | --- ### Release Notes
rust-pretty-assertions/rust-pretty-assertions (pretty_assertions) ### [`v1.4.1`](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions/blob/HEAD/CHANGELOG.md#v141) [Compare Source](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions/compare/v1.4.0...v1.4.1) #### Fixed - Show feature-flagged code in documentation. Thanks to [@​sandydoo](https://redirect.github.com/sandydoo) for the fix! ([#​130](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions/pull/130)) #### Internal - Bump `yansi` version to `1.x`. Thanks to [@​SergioBenitez](https://redirect.github.com/SergioBenitez) for the update, and maintaining this library! ([#​121](https://redirect.github.com/rust-pretty-assertions/rust-pretty-assertions/pull/121))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a94de65bc9896c..16597cd5d2f12e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8370,9 +8370,9 @@ dependencies = [ [[package]] name = "pretty_assertions" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", @@ -14527,9 +14527,9 @@ dependencies = [ [[package]] name = "yansi" -version = "0.5.1" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" [[package]] name = "yazi" From a9f816d5fb143380a8c3876e7a31e2c10826bb45 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 3 Oct 2024 12:38:51 -0400 Subject: [PATCH 466/762] telemetry_events: Update crate-level docs (#18703) This PR updates the `telemetry_events` crate to use module-level documentation for its crate-level docs. 
Release Notes: - N/A --- crates/telemetry_events/src/telemetry_events.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index d6e737b929e1f5..f7b18523df17eb 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -1,4 +1,5 @@ -/// Please see: [Telemetry in Zed](https://zed.dev/docs/telemetry) for additional documentation. +//! See [Telemetry in Zed](https://zed.dev/docs/telemetry) for additional information. + use semantic_version::SemanticVersion; use serde::{Deserialize, Serialize}; use std::{fmt::Display, sync::Arc, time::Duration}; From 8c95b8d89a9c7eb1a21fb88565806f2d780e45f8 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Thu, 3 Oct 2024 13:17:31 -0400 Subject: [PATCH 467/762] `theme` crate spring cleaning (#18695) This PR does some spring cleaning on the `theme` crate: - Removed two unused stories and the story dep - Removed the `one` theme family (from the `theme` crate, not the app), this is now `zed_default_themes`. - This will hopefully remove some confusion caused by this theme we started in rust but didn't end up using - Removed `theme::prelude` (it just re-exported scale colors, which we don't use outside `theme`) - Removed completely unused `zed_pro` themes (we started on these during the gpui2 port and didn't finish them.) 
Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- Cargo.lock | 1 - crates/theme/Cargo.toml | 2 - crates/theme/src/default_theme.rs | 71 --------- .../src/{one_themes.rs => fallback_themes.rs} | 18 +-- crates/theme/src/prelude.rs | 6 - crates/theme/src/registry.rs | 9 +- crates/theme/src/settings.rs | 4 +- crates/theme/src/styles.rs | 6 - crates/theme/src/styles/stories/color.rs | 40 ----- crates/theme/src/styles/stories/mod.rs | 5 - crates/theme/src/styles/stories/players.rs | 143 ------------------ crates/theme/src/theme.rs | 8 +- crates/theme/theme.md | 15 -- crates/theme/util/hex_to_hsla.py | 35 ----- 14 files changed, 15 insertions(+), 348 deletions(-) delete mode 100644 crates/theme/src/default_theme.rs rename crates/theme/src/{one_themes.rs => fallback_themes.rs} (96%) delete mode 100644 crates/theme/src/prelude.rs delete mode 100644 crates/theme/src/styles/stories/color.rs delete mode 100644 crates/theme/src/styles/stories/mod.rs delete mode 100644 crates/theme/src/styles/stories/players.rs delete mode 100644 crates/theme/theme.md delete mode 100644 crates/theme/util/hex_to_hsla.py diff --git a/Cargo.lock b/Cargo.lock index 16597cd5d2f12e..0a14f04f892b68 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11655,7 +11655,6 @@ dependencies = [ "serde_json_lenient", "serde_repr", "settings", - "story", "util", "uuid", ] diff --git a/crates/theme/Cargo.toml b/crates/theme/Cargo.toml index 934faa1186286b..b751bea727c754 100644 --- a/crates/theme/Cargo.toml +++ b/crates/theme/Cargo.toml @@ -10,7 +10,6 @@ workspace = true [features] default = [] -stories = ["dep:story"] test-support = ["gpui/test-support", "fs/test-support", "settings/test-support"] [lib] @@ -36,7 +35,6 @@ serde_json.workspace = true serde_json_lenient.workspace = true serde_repr.workspace = true settings.workspace = true -story = { workspace = true, optional = true } util.workspace = true uuid.workspace = true diff --git a/crates/theme/src/default_theme.rs 
b/crates/theme/src/default_theme.rs deleted file mode 100644 index 58063409165330..00000000000000 --- a/crates/theme/src/default_theme.rs +++ /dev/null @@ -1,71 +0,0 @@ -use std::sync::Arc; - -use gpui::WindowBackgroundAppearance; - -use crate::AccentColors; - -use crate::{ - default_color_scales, - one_themes::{one_dark, one_family}, - Appearance, PlayerColors, StatusColors, SyntaxTheme, SystemColors, Theme, ThemeColors, - ThemeFamily, ThemeStyles, -}; - -fn zed_pro_daylight() -> Theme { - Theme { - id: "zed_pro_daylight".to_string(), - name: "Zed Pro Daylight".into(), - appearance: Appearance::Light, - styles: ThemeStyles { - window_background_appearance: WindowBackgroundAppearance::Opaque, - system: SystemColors::default(), - colors: ThemeColors::light(), - status: StatusColors::light(), - player: PlayerColors::light(), - syntax: Arc::new(SyntaxTheme::default()), - accents: AccentColors::light(), - }, - } -} - -pub(crate) fn zed_pro_moonlight() -> Theme { - Theme { - id: "zed_pro_moonlight".to_string(), - name: "Zed Pro Moonlight".into(), - appearance: Appearance::Dark, - styles: ThemeStyles { - window_background_appearance: WindowBackgroundAppearance::Opaque, - system: SystemColors::default(), - colors: ThemeColors::dark(), - status: StatusColors::dark(), - player: PlayerColors::dark(), - syntax: Arc::new(SyntaxTheme::default()), - accents: AccentColors::dark(), - }, - } -} - -/// Returns the Zed Pro theme family. -/// -/// Note: To be removed until the theme is implemented. 
-pub fn zed_pro_family() -> ThemeFamily { - ThemeFamily { - id: "zed_pro".to_string(), - name: "Zed Pro".into(), - author: "Zed Team".into(), - themes: vec![zed_pro_daylight(), zed_pro_moonlight()], - scales: default_color_scales(), - } -} - -impl Default for ThemeFamily { - fn default() -> Self { - one_family() - } -} - -impl Default for Theme { - fn default() -> Self { - one_dark() - } -} diff --git a/crates/theme/src/one_themes.rs b/crates/theme/src/fallback_themes.rs similarity index 96% rename from crates/theme/src/one_themes.rs rename to crates/theme/src/fallback_themes.rs index 50a4184e8bc934..553c75623381d6 100644 --- a/crates/theme/src/one_themes.rs +++ b/crates/theme/src/fallback_themes.rs @@ -7,21 +7,21 @@ use crate::{ SystemColors, Theme, ThemeColors, ThemeFamily, ThemeStyles, }; -// Note: This theme family is not the one you see in Zed at the moment. -// This is a from-scratch rebuild that Nate started work on. We currently -// only use this in the tests, and the One family from the `themes/` directory -// is what gets loaded into Zed when running it. -pub fn one_family() -> ThemeFamily { +/// The default theme family for Zed. +/// +/// This is used to construct the default theme fallback values, as well as to +/// have a theme available at compile time for tests. +pub fn zed_default_themes() -> ThemeFamily { ThemeFamily { - id: "one".to_string(), - name: "One".into(), + id: "zed-default".to_string(), + name: "Zed Default".into(), author: "".into(), - themes: vec![one_dark()], + themes: vec![zed_default_dark()], scales: default_color_scales(), } } -pub(crate) fn one_dark() -> Theme { +pub(crate) fn zed_default_dark() -> Theme { let bg = hsla(215. / 360., 12. / 100., 15. / 100., 1.); let editor = hsla(220. / 360., 12. / 100., 18. / 100., 1.); let elevated_surface = hsla(225. / 360., 12. / 100., 17. 
/ 100., 1.); diff --git a/crates/theme/src/prelude.rs b/crates/theme/src/prelude.rs deleted file mode 100644 index e8e2378249fd4a..00000000000000 --- a/crates/theme/src/prelude.rs +++ /dev/null @@ -1,6 +0,0 @@ -#[allow(unused)] -pub(crate) use crate::default_colors::{ - amber, black, blue, bronze, brown, crimson, cyan, gold, grass, gray, green, indigo, iris, jade, - lime, mauve, mint, olive, orange, pink, plum, purple, red, ruby, sage, sand, sky, slate, teal, - tomato, violet, white, yellow, -}; diff --git a/crates/theme/src/registry.rs b/crates/theme/src/registry.rs index a77ab141a6547b..771511973f5743 100644 --- a/crates/theme/src/registry.rs +++ b/crates/theme/src/registry.rs @@ -74,12 +74,9 @@ impl ThemeRegistry { assets, }; - // We're loading our new versions of the One themes by default, as - // we need them to be loaded for tests. - // - // These themes will get overwritten when `load_user_themes` is called - // when Zed starts, so the One variants used will be the ones ported from Zed1. - registry.insert_theme_families([crate::one_themes::one_family()]); + // We're loading the Zed default theme, as we need a theme to be loaded + // for tests. 
+ registry.insert_theme_families([crate::fallback_themes::zed_default_themes()]); registry } diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 0c8ea782cd6081..d126ec058c2c62 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -1,4 +1,4 @@ -use crate::one_themes::one_dark; +use crate::fallback_themes::zed_default_dark; use crate::{Appearance, SyntaxTheme, Theme, ThemeRegistry, ThemeStyleContent}; use anyhow::Result; use derive_more::{Deref, DerefMut}; @@ -629,7 +629,7 @@ impl settings::Settings for ThemeSettings { theme_selection: defaults.theme.clone(), active_theme: themes .get(defaults.theme.as_ref().unwrap().theme(*system_appearance)) - .or(themes.get(&one_dark().name)) + .or(themes.get(&zed_default_dark().name)) .unwrap(), theme_overrides: None, ui_density: defaults.ui_density.unwrap_or(UiDensity::Default), diff --git a/crates/theme/src/styles.rs b/crates/theme/src/styles.rs index 137603113aadcb..da22f8de1f5f1b 100644 --- a/crates/theme/src/styles.rs +++ b/crates/theme/src/styles.rs @@ -5,15 +5,9 @@ mod status; mod syntax; mod system; -#[cfg(feature = "stories")] -mod stories; - pub use accents::*; pub use colors::*; pub use players::*; pub use status::*; pub use syntax::*; pub use system::*; - -#[cfg(feature = "stories")] -pub use stories::*; diff --git a/crates/theme/src/styles/stories/color.rs b/crates/theme/src/styles/stories/color.rs deleted file mode 100644 index 8e6c86ba762ba8..00000000000000 --- a/crates/theme/src/styles/stories/color.rs +++ /dev/null @@ -1,40 +0,0 @@ -use gpui::prelude::*; -use gpui::{div, px, ViewContext}; -use story::Story; - -use crate::{default_color_scales, ColorScaleStep}; - -/// The story showcasing all the default color scales -pub struct ColorsStory; - -impl Render for ColorsStory { - fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let color_scales = default_color_scales(); - - Story::container().child(Story::title("Colors")).child( - div() - 
.id("colors") - .flex() - .flex_col() - .gap_1() - .overflow_y_scroll() - .text_color(gpui::white()) - .children(color_scales.into_iter().map(|scale| { - div() - .flex() - .child( - div() - .w(px(75.)) - .line_height(px(24.)) - .child(scale.name().clone()), - ) - .child( - div().flex().gap_1().children( - ColorScaleStep::ALL - .map(|step| div().flex().size_6().bg(scale.step(cx, step))), - ), - ) - })), - ) - } -} diff --git a/crates/theme/src/styles/stories/mod.rs b/crates/theme/src/styles/stories/mod.rs deleted file mode 100644 index af6af965484adc..00000000000000 --- a/crates/theme/src/styles/stories/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod color; -mod players; - -pub use color::*; -pub use players::*; diff --git a/crates/theme/src/styles/stories/players.rs b/crates/theme/src/styles/stories/players.rs deleted file mode 100644 index 0d50c6edc9afcd..00000000000000 --- a/crates/theme/src/styles/stories/players.rs +++ /dev/null @@ -1,143 +0,0 @@ -use gpui::{div, img, px, IntoElement, ParentElement, Render, Styled, ViewContext}; -use story::Story; - -use crate::{ActiveTheme, PlayerColors}; - -/// The story showcasing the player colors -pub struct PlayerStory; - -impl Render for PlayerStory { - fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - Story::container().child( - div() - .flex() - .flex_col() - .gap_4() - .child(Story::title_for::()) - .child(Story::label("Player Colors")) - .child( - div() - .flex() - .flex_col() - .gap_1() - .child( - div().flex().gap_1().children( - cx.theme() - .players() - .0 - .clone() - .iter_mut() - .map(|player| div().w_8().h_8().rounded_md().bg(player.cursor)), - ), - ) - .child( - div().flex().gap_1().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div().w_8().h_8().rounded_md().bg(player.background) - }), - ), - ) - .child( - div().flex().gap_1().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div().w_8().h_8().rounded_md().bg(player.selection) - }), - ), - ), - ) - 
.child(Story::label("Avatar Rings")) - .child(div().flex().gap_1().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div() - .my_1() - .rounded_full() - .border_2() - .border_color(player.cursor) - .child( - img("https://avatars.githubusercontent.com/u/1714999?v=4") - .rounded_full() - .size_6() - .bg(gpui::red()), - ) - }), - )) - .child(Story::label("Player Backgrounds")) - .child(div().flex().gap_1().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div() - .my_1() - .rounded_xl() - .flex() - .items_center() - .h_8() - .py_0p5() - .px_1p5() - .bg(player.background) - .child( - div() - .relative() - .mx_neg_1() - .rounded_full() - .border_2() - .border_color(player.background) - .size(px(28.)) - .child( - img("https://avatars.githubusercontent.com/u/1714999?v=4") - .rounded_full() - .size(px(24.)) - .bg(gpui::red()), - ), - ) - .child( - div() - .relative() - .mx_neg_1() - .rounded_full() - .border_2() - .border_color(player.background) - .size(px(28.)) - .child( - img("https://avatars.githubusercontent.com/u/1714999?v=4") - .rounded_full() - .size(px(24.)) - .bg(gpui::red()), - ), - ) - .child( - div() - .relative() - .mx_neg_1() - .rounded_full() - .border_2() - .border_color(player.background) - .size(px(28.)) - .child( - img("https://avatars.githubusercontent.com/u/1714999?v=4") - .rounded_full() - .size(px(24.)) - .bg(gpui::red()), - ), - ) - }), - )) - .child(Story::label("Player Selections")) - .child(div().flex().flex_col().gap_px().children( - cx.theme().players().0.clone().iter_mut().map(|player| { - div() - .flex() - .child( - div() - .flex() - .flex_none() - .rounded_sm() - .px_0p5() - .text_color(cx.theme().colors().text) - .bg(player.selection) - .child("The brown fox jumped over the lazy dog."), - ) - .child(div().flex_1()) - }), - )), - ) - } -} diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index a6ca59d734e245..d4436e53295278 100644 --- a/crates/theme/src/theme.rs +++ 
b/crates/theme/src/theme.rs @@ -9,13 +9,8 @@ //! A theme is a collection of colors used to build a consistent appearance for UI components across the application. mod default_colors; -mod default_theme; +mod fallback_themes; mod font_family_cache; -mod one_themes; -/// A prelude for working with the theme system. -/// -/// TODO: remove this. This only publishes default colors. -pub mod prelude; mod registry; mod scale; mod schema; @@ -26,7 +21,6 @@ use std::sync::Arc; use ::settings::{Settings, SettingsStore}; pub use default_colors::*; -pub use default_theme::*; pub use font_family_cache::*; pub use registry::*; pub use scale::*; diff --git a/crates/theme/theme.md b/crates/theme/theme.md deleted file mode 100644 index 4cb19eb3c4ebef..00000000000000 --- a/crates/theme/theme.md +++ /dev/null @@ -1,15 +0,0 @@ -# Theme - -This crate provides the theme system for Zed. - -## Overview - -A theme is a collection of colors used to build a consistent appearance for UI components across the application. -To produce a theme in Zed, - -A theme is made of two parts: A [ThemeFamily] and one or more [Theme]s. - -// -A [ThemeFamily] contains metadata like theme name, author, and theme-specific [ColorScales] as well as a series of themes. - -- [ThemeColors] - A set of colors that are used to style the UI. Refer to the [ThemeColors] documentation for more information. 
diff --git a/crates/theme/util/hex_to_hsla.py b/crates/theme/util/hex_to_hsla.py deleted file mode 100644 index 17faa186d8c6e6..00000000000000 --- a/crates/theme/util/hex_to_hsla.py +++ /dev/null @@ -1,35 +0,0 @@ -import colorsys -import sys - -def hex_to_rgb(hex): - hex = hex.lstrip('#') - if len(hex) == 8: # 8 digit hex color - r, g, b, a = (int(hex[i:i+2], 16) for i in (0, 2, 4, 6)) - return r, g, b, a / 255.0 - else: # 6 digit hex color - return tuple(int(hex[i:i+2], 16) for i in (0, 2, 4)) + (1.0,) - -def rgb_to_hsla(rgb): - h, l, s = colorsys.rgb_to_hls(rgb[0]/255.0, rgb[1]/255.0, rgb[2]/255.0) - a = rgb[3] # alpha value - return (round(h * 360, 1), round(s * 100, 1), round(l * 100, 1), round(a, 3)) - -def hex_to_hsla(hex): - return rgb_to_hsla(hex_to_rgb(hex)) - -if len(sys.argv) != 2: - print("Usage: python util/hex_to_hsla.py <6 or 8 digit hex color or comma-separated list of colors>") -else: - input_arg = sys.argv[1] - if ',' in input_arg: # comma-separated list of colors - hex_colors = input_arg.split(',') - hslas = [] # output array - for hex_color in hex_colors: - hex_color = hex_color.strip("'\" ") - h, s, l, a = hex_to_hsla(hex_color) - hslas.append(f"hsla({h} / 360., {s} / 100., {l} / 100., {a})") - print(hslas) - else: # single color - hex_color = input_arg.strip("'\"") - h, s, l, a = hex_to_hsla(hex_color) - print(f"hsla({h} / 360., {s} / 100., {l} / 100., {a})") From cddd7875a42cb017b2751e5b6639ba322f6629df Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 3 Oct 2024 13:37:43 -0400 Subject: [PATCH 468/762] Extract Protocol Buffers support into an extension (#18704) This PR extracts the Protocol Buffers support into an extension. Release Notes: - Removed built-in support for Protocol Buffers, in favor of making it available as an extension. The Protocol Buffers extension will be suggested for download when you open a `.proto` file. 
--- Cargo.lock | 10 ---------- Cargo.toml | 1 - crates/extensions_ui/src/extension_suggest.rs | 1 + crates/languages/Cargo.toml | 2 -- crates/languages/src/lib.rs | 4 +--- extensions/proto/extension.toml | 11 +++++++++++ .../proto/languages}/proto/config.toml | 0 .../proto/languages}/proto/highlights.scm | 0 .../proto/languages}/proto/outline.scm | 0 9 files changed, 13 insertions(+), 16 deletions(-) create mode 100644 extensions/proto/extension.toml rename {crates/languages/src => extensions/proto/languages}/proto/config.toml (100%) rename {crates/languages/src => extensions/proto/languages}/proto/highlights.scm (100%) rename {crates/languages/src => extensions/proto/languages}/proto/outline.scm (100%) diff --git a/Cargo.lock b/Cargo.lock index 0a14f04f892b68..12b38967cc3fbf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6386,7 +6386,6 @@ dependencies = [ "node_runtime", "paths", "project", - "protols-tree-sitter-proto", "regex", "rope", "rust-embed", @@ -8644,15 +8643,6 @@ version = "2.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" -[[package]] -name = "protols-tree-sitter-proto" -version = "0.2.0" -source = "git+https://github.com/zed-industries/tree-sitter-proto?rev=0848bd30a64be48772e15fbb9d5ba8c0cc5772ad#0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" -dependencies = [ - "cc", - "tree-sitter-language", -] - [[package]] name = "psm" version = "0.1.21" diff --git a/Cargo.toml b/Cargo.toml index 8feb93a57856a2..a23663f5c818f8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -388,7 +388,6 @@ profiling = "1" prost = "0.9" prost-build = "0.9" prost-types = "0.9" -protols-tree-sitter-proto = { git = "https://github.com/zed-industries/tree-sitter-proto", rev = "0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" } pulldown-cmark = { version = "0.12.0", default-features = false } rand = "0.8.5" regex = "1.5" diff --git a/crates/extensions_ui/src/extension_suggest.rs 
b/crates/extensions_ui/src/extension_suggest.rs index 89f51fdebcfee9..ed003f25b7f235 100644 --- a/crates/extensions_ui/src/extension_suggest.rs +++ b/crates/extensions_ui/src/extension_suggest.rs @@ -54,6 +54,7 @@ const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[ ("ocaml", &["ml", "mli"]), ("php", &["php"]), ("prisma", &["prisma"]), + ("proto", &["proto"]), ("purescript", &["purs"]), ("r", &["r", "R"]), ("racket", &["rkt"]), diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index 19842efac20cf7..dc7868ea86dda4 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -21,7 +21,6 @@ load-grammars = [ "tree-sitter-jsdoc", "tree-sitter-json", "tree-sitter-md", - "protols-tree-sitter-proto", "tree-sitter-python", "tree-sitter-regex", "tree-sitter-rust", @@ -46,7 +45,6 @@ lsp.workspace = true node_runtime.workspace = true paths.workspace = true project.workspace = true -protols-tree-sitter-proto = { workspace = true, optional = true } regex.workspace = true rope.workspace = true rust-embed.workspace = true diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 295df6e419b7ec..374b32c0ac3494 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -45,7 +45,6 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu ("jsonc", tree_sitter_json::LANGUAGE), ("markdown", tree_sitter_md::LANGUAGE), ("markdown-inline", tree_sitter_md::INLINE_LANGUAGE), - ("proto", protols_tree_sitter_proto::LANGUAGE), ("python", tree_sitter_python::LANGUAGE), ("regex", tree_sitter_regex::LANGUAGE), ("rust", tree_sitter_rust::LANGUAGE), @@ -183,7 +182,6 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu "yaml", vec![Arc::new(yaml::YamlLspAdapter::new(node_runtime.clone()))] ); - language!("proto"); // Register globally available language servers. 
// @@ -277,7 +275,7 @@ pub fn language(name: &str, grammar: tree_sitter::Language) -> Arc { fn load_config(name: &str) -> LanguageConfig { let config_toml = String::from_utf8( LanguageDir::get(&format!("{}/config.toml", name)) - .unwrap() + .unwrap_or_else(|| panic!("missing config for language {:?}", name)) .data .to_vec(), ) diff --git a/extensions/proto/extension.toml b/extensions/proto/extension.toml new file mode 100644 index 00000000000000..a49ba7a4c4a7ec --- /dev/null +++ b/extensions/proto/extension.toml @@ -0,0 +1,11 @@ +id = "proto" +name = "Proto" +description = "Protocol Buffers support." +version = "0.1.0" +schema_version = 1 +authors = ["Zed Industries "] +repository = "https://github.com/zed-industries/zed" + +[grammars.proto] +repository = "https://github.com/zed-industries/tree-sitter-proto" +commit = "0848bd30a64be48772e15fbb9d5ba8c0cc5772ad" diff --git a/crates/languages/src/proto/config.toml b/extensions/proto/languages/proto/config.toml similarity index 100% rename from crates/languages/src/proto/config.toml rename to extensions/proto/languages/proto/config.toml diff --git a/crates/languages/src/proto/highlights.scm b/extensions/proto/languages/proto/highlights.scm similarity index 100% rename from crates/languages/src/proto/highlights.scm rename to extensions/proto/languages/proto/highlights.scm diff --git a/crates/languages/src/proto/outline.scm b/extensions/proto/languages/proto/outline.scm similarity index 100% rename from crates/languages/src/proto/outline.scm rename to extensions/proto/languages/proto/outline.scm From cac98b7bbff89e6b0c7f632940ce55597fa1c55c Mon Sep 17 00:00:00 2001 From: Jordan Pittman Date: Thu, 3 Oct 2024 14:38:17 -0400 Subject: [PATCH 469/762] Show color swatches for LSP completions (#18665) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #11991 Release Notes: - Added support for color swatches for language server completions. 
Screenshot 2024-10-02 at 19 02 22 Screenshot 2024-10-02 at 19 02 48 This implementation is mostly a port of the VSCode version of the ColorExtractor. It seems reasonable the we should support _at least_ what VSCode does for detecting color swatches from LSP completions. This implementation could definitely be better perf-wise by writing a dedicated color parser. I also think it would be neat if, in the future, Zed handled _more_ color formats — especially wide-gamut colors. There are a few differences to the regexes in the VSCode implementation but mainly so simplify the implementation : - The hex vs rgb/hsl regexes were split into two parts - The rgb/hsl regexes allow 3 or 4 color components whether hsla/rgba or not and the parsing implementation accepts/rejects colors as needed --------- Co-authored-by: Marshall Bowers --- crates/editor/src/editor.rs | 5 + crates/project/src/color_extractor.rs | 297 ++++++++++++++++++++++++++ crates/project/src/project.rs | 17 +- typos.toml | 4 +- 4 files changed, 320 insertions(+), 3 deletions(-) create mode 100644 crates/project/src/color_extractor.rs diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 525a94f2582ab1..e2355461046e36 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1228,6 +1228,10 @@ impl CompletionsMenu { None }; + let color_swatch = completion + .color() + .map(|color| div().size_4().bg(color).rounded(px(2.))); + div().min_w(px(220.)).max_w(px(540.)).child( ListItem::new(mat.candidate_id) .inset(true) @@ -1243,6 +1247,7 @@ impl CompletionsMenu { task.detach_and_log_err(cx) } })) + .start_slot::
(color_swatch) .child(h_flex().overflow_hidden().child(completion_label)) .end_slot::