
Commit

Merge branch 'johan/conflict-context-ii'
Fixes #65.
walles committed May 20, 2024
2 parents 645462f + a6ffa2e commit e04ac0b
Showing 5 changed files with 80 additions and 32 deletions.
6 changes: 3 additions & 3 deletions src/conflicts_highlighter.rs
@@ -188,7 +188,7 @@ impl ConflictsHighlighter {
let c1_or_newline = if c1.is_empty() { "\n" } else { &c1 };
let c2_or_newline = if c2.is_empty() { "\n" } else { &c2 };
let (c1_tokens, c2_tokens, _, _) =
refiner::to_highlighted_tokens(c1_or_newline, c2_or_newline);
refiner::to_highlighted_tokens(c1_or_newline, c2_or_newline, false);

let c1_style = if base_header.is_empty() {
LINE_STYLE_OLD
@@ -262,13 +262,13 @@ impl ConflictsHighlighter {

let c1_or_newline = if c1.is_empty() { "\n" } else { &c1 };
let (base_vs_c1_tokens, c1_tokens, _, _) =
refiner::to_highlighted_tokens(base_or_newline, c1_or_newline);
refiner::to_highlighted_tokens(base_or_newline, c1_or_newline, true);
let highlighted_c1 =
token_collector::render(&LINE_STYLE_NEW, c1_prefix, &c1_tokens);

let c2_or_newline = if c2.is_empty() { "\n" } else { &c2 };
let (base_vs_c2_tokens, c2_tokens, _, _) =
refiner::to_highlighted_tokens(base_or_newline, c2_or_newline);
refiner::to_highlighted_tokens(base_or_newline, c2_or_newline, true);
let highlighted_c2 =
token_collector::render(&LINE_STYLE_NEW, c2_prefix, &c2_tokens);

2 changes: 1 addition & 1 deletion src/plusminus_header_highlighter.rs
@@ -118,7 +118,7 @@ impl PlusMinusHeaderHighlighter {
}

let (mut old_tokens, mut new_tokens, _, _) =
to_highlighted_tokens(&self.old_name, &self.new_name);
to_highlighted_tokens(&self.old_name, &self.new_name, false);

lowlight_timestamp(&mut old_tokens);
lowlight_timestamp(&mut new_tokens);
8 changes: 7 additions & 1 deletion src/refiner.rs
@@ -108,7 +108,7 @@ pub fn format(prefixes: &[&str], prefix_texts: &[&str]) -> Vec<String> {
new_tokens_internal,
old_highlights_internal,
new_unhighlighted_internal,
) = to_highlighted_tokens(old_text, new_text);
) = to_highlighted_tokens(old_text, new_text, false);

old_tokens.push(old_tokens_internal);
old_highlights |= old_highlights_internal;
@@ -167,9 +167,12 @@ pub fn format(prefixes: &[&str], prefix_texts: &[&str]) -> Vec<String> {
/// `old_text` and `new_text` are multi-line strings. Having or not having
/// trailing newlines will affect tokenization. The lines are not expected to
/// have any prefixes like `+` or `-`.
///
/// Conflict diffs are highlighted somewhat differently from regular diffs.
pub fn to_highlighted_tokens(
old_text: &str,
new_text: &str,
is_three_way_conflict: bool,
) -> (Vec<StyledToken>, Vec<StyledToken>, bool, bool) {
// Find diffs between adds and removals
let mut old_tokens = Vec::new();
@@ -228,6 +231,9 @@ pub fn to_highlighted_tokens(
bridge_consecutive_highlighted_tokens(&mut old_tokens);
unhighlight_noisy_rows(&mut old_tokens);

if is_three_way_conflict {
contextualize_unhighlighted_lines(&mut new_tokens);
}
bridge_consecutive_highlighted_tokens(&mut new_tokens);
let new_unhighlighted = unhighlight_noisy_rows(&mut new_tokens);
highlight_trailing_whitespace(&mut new_tokens);
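The refiner change above adds a third parameter, `is_three_way_conflict`, to `to_highlighted_tokens()` and uses it to run the new `contextualize_unhighlighted_lines()` pass (added in `src/token_collector.rs` below) on the new-side tokens only. The following is a minimal, self-contained sketch of that gating; the `Token`, `Style`, and `highlight()` names here are simplified stand-ins for illustration, not riff's actual types.

```rust
// Minimal model of the new flag (simplified stand-ins, not riff's real
// StyledToken/Style types): when `is_three_way_conflict` is true, an extra
// pass restyles unhighlighted tokens on the "new" side as context before
// rendering; regular diffs are unaffected.

#[derive(Clone, Copy, Debug, PartialEq)]
enum Style {
    Plain,
    Context,
}

#[derive(Clone, Debug)]
struct Token {
    text: String,
    style: Style,
}

// Stand-in for contextualize_unhighlighted_lines(); the real per-line rule
// is sketched after the token_collector.rs diff below.
fn contextualize(tokens: &mut [Token]) {
    for token in tokens.iter_mut().filter(|t| t.style == Style::Plain) {
        token.style = Style::Context;
    }
}

fn highlight(old: &str, new: &str, is_three_way_conflict: bool) -> (Vec<Token>, Vec<Token>) {
    // Pretend tokenization and diff highlighting already happened and left
    // everything Plain.
    let tokenize = |text: &str| {
        text.split_inclusive('\n')
            .map(|t| Token { text: t.to_string(), style: Style::Plain })
            .collect::<Vec<_>>()
    };
    let old_tokens = tokenize(old);
    let mut new_tokens = tokenize(new);

    if is_three_way_conflict {
        // Mirrors the new branch in to_highlighted_tokens(): only conflict
        // diffs get the contextualization pass, and only on the new side.
        contextualize(&mut new_tokens);
    }
    (old_tokens, new_tokens)
}

fn main() {
    let (_, conflict_side) = highlight("base\n", "side\n", true);
    assert!(conflict_side.iter().all(|t| t.style == Style::Context));
    println!("conflict side: {conflict_side:?}");

    let (_, regular) = highlight("old\n", "new\n", false);
    assert!(regular.iter().all(|t| t.style == Style::Plain));
    println!("regular diff: {regular:?}");
}
```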
54 changes: 48 additions & 6 deletions src/token_collector.rs
@@ -9,10 +9,11 @@ use crate::ansi::ANSI_STYLE_NORMAL;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum Style {
Lowlighted,
Context,
Plain,
Highlighted,
Error,
Lowlighted,
}

#[derive(Clone, Debug, PartialEq, Eq)]
@@ -200,18 +201,23 @@ fn render_row(line_style: &LineStyle, prefix: &str, row: &[StyledToken]) -> Stri
// Render tokens
for token in row {
let new_style = match token.style {
Style::Plain => line_style.plain_style,
Style::Highlighted => line_style.highlighted_style,
Style::Error => AnsiStyle {
inverse: true,
Style::Context => AnsiStyle {
inverse: false,
weight: Weight::Normal,
color: Red,
color: Default,
},
Style::Lowlighted => AnsiStyle {
inverse: false,
weight: Weight::Faint,
color: Default,
},
Style::Plain => line_style.plain_style,
Style::Highlighted => line_style.highlighted_style,
Style::Error => AnsiStyle {
inverse: true,
weight: Weight::Normal,
color: Red,
},
};

rendered.push_str(&new_style.from(&current_style));
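For context on how the new `Context` variant differs from its neighbors: per the match arms above, `Context` renders with the terminal's default color and normal weight, unlike `Lowlighted` (faint) and unlike `Plain`, which inherits the line style. The snippet below is a rough, generic ANSI illustration of that difference; riff's exact escape sequences come from its `AnsiStyle` type, which is not shown in full in this diff, so treat these SGR codes as assumptions for demonstration only.

```rust
// Generic ANSI/SGR illustration of the three rendering flavors; not riff's
// literal output.
fn main() {
    let added_line = "\x1b[32m"; // Plain on a "+" line: the line's own color
    let context = "\x1b[0m";     // Context: default color, normal weight
    let lowlight = "\x1b[2m";    // Lowlighted: faint
    let reset = "\x1b[0m";

    println!("{added_line}+ text styled by the line style{reset}");
    println!("{context}+ conflict context (plain-only line){reset}");
    println!("{lowlight}  lowlighted text{reset}");
}
```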
@@ -365,6 +371,42 @@ pub(crate) fn align_tabs(old: &mut [StyledToken], new: &mut [StyledToken]) {
new[new_tab_index_token].token = new_spaces;
}

/// If a line contains only plain tokens, style all tokens on that line as
/// context. Lines are separated by newline tokens.
///
/// This can happen during conflicts highlighting.
pub fn contextualize_unhighlighted_lines(tokens: &mut [StyledToken]) {
let mut line_start = 0;
for i in 0..tokens.len() {
if tokens[i].token != "\n" {
continue;
}

// Line ended

if tokens[line_start..(i + 1)]
.iter()
.all(|token| token.style == Style::Plain)
{
// Line contains only plain tokens
for token in &mut tokens[line_start..i] {
token.style = Style::Context;
}
}
line_start = i + 1;
}

// Handle the last line
if tokens[line_start..]
.iter()
.all(|token| token.style == Style::Plain)
{
for token in &mut tokens[line_start..] {
token.style = Style::Context;
}
}
}

/// Highlight single space between two highlighted tokens
pub fn bridge_consecutive_highlighted_tokens(tokens: &mut [StyledToken]) {
enum FoundState {
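The rule documented on `contextualize_unhighlighted_lines()` above is: a line becomes `Context` only when every token on it, including the terminating newline token, is `Plain`; a line containing any highlighted token is left untouched, and a final line without a trailing newline is handled separately. Below is a self-contained re-creation of that rule using simplified stand-in types rather than riff's own `StyledToken`, with a small check of the behavior.

```rust
// Re-creation of the per-line contextualization rule with simplified types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Style {
    Plain,
    Context,
    Highlighted,
}

#[derive(Clone, Debug)]
struct Token {
    text: &'static str,
    style: Style,
}

fn contextualize_unhighlighted_lines(tokens: &mut [Token]) {
    let mut line_start = 0;
    for i in 0..tokens.len() {
        if tokens[i].text != "\n" {
            continue;
        }
        // Line ended at index i. The check includes the newline token
        // (line_start..=i); the restyle excludes it (line_start..i),
        // matching the ranges in the function above.
        if tokens[line_start..=i].iter().all(|t| t.style == Style::Plain) {
            for t in &mut tokens[line_start..i] {
                t.style = Style::Context;
            }
        }
        line_start = i + 1;
    }
    // Last line, in case the input did not end with a newline token.
    if tokens[line_start..].iter().all(|t| t.style == Style::Plain) {
        for t in &mut tokens[line_start..] {
            t.style = Style::Context;
        }
    }
}

fn main() {
    let mut tokens = vec![
        Token { text: "unchanged", style: Style::Plain },
        Token { text: "\n", style: Style::Plain },
        Token { text: "edited", style: Style::Highlighted },
        Token { text: " line", style: Style::Plain },
        Token { text: "\n", style: Style::Plain },
        Token { text: "trailing", style: Style::Plain }, // no final newline
    ];
    contextualize_unhighlighted_lines(&mut tokens);

    assert_eq!(tokens[0].style, Style::Context);     // plain-only line
    assert_eq!(tokens[2].style, Style::Highlighted); // mixed line untouched
    assert_eq!(tokens[3].style, Style::Plain);
    assert_eq!(tokens[5].style, Style::Context);     // last line, no newline
    println!("contextualization matches the documented rule");
}
```

This is the behavior the `refiner.rs` hunk opts into when `is_three_way_conflict` is set.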
42 changes: 21 additions & 21 deletions testdata/conflict-with-context.riff-output
@@ -7,11 +7,11 @@
)

++<<<<<<< HEAD
 +func (p *Pager) scrollToSearchHits() {
 + if p.searchPattern == nil {
 + // This is not a search
 + return
 + }
 +func (p *Pager) scrollToSearchHits() {
 + if p.searchPattern == nil {
 + // This is not a search
 + return
 + }
 +
 + lineNumber := p.scrollPosition.lineNumber(p)
 + if lineNumber == nil {
@@ -21,21 +21,21 @@
 +
 + firstHitPosition := p.findFirstHit(*lineNumber, nil, false)
 + if firstHitPosition == nil && (*lineNumber != linenumbers.LineNumber{}) {
 + // Try again from the top
 + firstHitPosition = p.findFirstHit(linenumbers.LineNumber{}, lineNumber, false)
 + }
 + if firstHitPosition == nil {
 + // No match, give up
 + return
 + }
 + // Try again from the top
 + firstHitPosition = p.findFirstHit(linenumbers.LineNumber{}, lineNumber, false)
 + }
 + if firstHitPosition == nil {
 + // No match, give up
 + return
 + }
 +
 + if firstHitPosition.isVisible(p) {
 + // Already on-screen, never mind
 + return
 + }
 + if firstHitPosition.isVisible(p) {
 + // Already on-screen, never mind
 + return
 + }
 +
 + p.scrollPosition = *firstHitPosition
 +}
 + p.scrollPosition = *firstHitPosition
 +}
 +
 +// NOTE: When we search, we do that by looping over the *input lines*, not the
 +// screen lines. That's why startPosition is a LineNumber rather than a
@@ -75,9 +75,9 @@
++// the screen lines. That's why we're using a line number rather than a
++// scrollPosition for searching.
++=======
+ // NOTE: When we search, we do that by looping over the *input lines*, not
+ // the screen lines. That's why we're using a line number rather than a
+ // scrollPosition for searching.
+ // NOTE: When we search, we do that by looping over the *input lines*, not
+ // the screen lines. That's why we're using a line number rather than a
+ // scrollPosition for searching.
++>>>>>>> b835e9a (Fix the warnings)
//
// FIXME: We should take startPosition.deltaScreenLines into account as well!
