1 /* Implementation of the Patience Diff algorithm invented by Bram Cohen:
2 * Divide a diff problem into smaller chunks by an LCS of common-unique lines. */
4 * Copyright (c) 2020 Neels Hofmeyr <neels@hofmeyr.de>
6 * Permission to use, copy, modify, and distribute this software for any
7 * purpose with or without fee is hereby granted, provided that the above
8 * copyright notice and this permission notice appear in all copies.
10 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
11 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
12 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
13 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
14 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
15 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
16 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
26 #include <arraylist.h>
27 #include <diff_main.h>
29 #include "diff_internal.h"
30 #include "diff_debug.h"
/* qsort(3) comparator for an array of struct diff_atom pointers.
 * Primary order is the atoms' precomputed hash; on a hash tie, a full
 * content comparison via diff_atom_cmp() decides.  qsort(3) gives a
 * comparator no way to report failure, so any I/O error encountered is
 * recorded in the atoms' root->err for the caller to inspect afterwards.
 * NOTE(review): some lines of this function are elided in this extract
 * (variable declarations, returns); the notes below cover only what is
 * visible. */
32 int diff_atoms_qsort_compar(const void *_a, const void *_b)
34 const struct diff_atom *a = *(struct diff_atom**)_a;
35 const struct diff_atom *b = *(struct diff_atom**)_b;
/* If either side already carries an error, keep propagating it rather
 * than doing further comparisons. */
39 if (a->root->err || b->root->err) {
40 /* If atoms are from more than one diff_data, make sure the
41 * error, if any, spreads to all of them. */
47 /* Sort by the simplistic hash */
48 if (a->hash < b->hash)
50 if (a->hash > b->hash)
53 /* If hashes are the same, the lines may still differ. Do a full cmp. */
54 rc = diff_atom_cmp(&cmp, a, b);
57 /* Mark the I/O error so that the caller can find out about it.
58 * For the case atoms are from more than one diff_data, mark in
61 if (a->root != b->root)
69 /* Sort an array of struct diff_atom* in-place. */
/* Returns the error flag stored in the atoms' root (set by the
 * comparator above on a failed diff_atom_cmp), since qsort(3) itself
 * cannot report errors.  Assumes atoms_count >= 1: atoms[0] is
 * dereferenced unconditionally. */
70 static int diff_atoms_qsort(struct diff_atom *atoms[],
73 qsort(atoms, atoms_count, sizeof(struct diff_atom*),
74 diff_atoms_qsort_compar);
75 return atoms[0]->root->err;
/* Find lines that appear exactly once in 'left' AND exactly once in
 * 'right': gather pointers to all atoms from both sides into one array,
 * sort it (similar lines become adjacent), then scan each run of equal
 * atoms and count per side.  Runs with exactly one occurrence on each
 * side get patience.unique_in_both set and the two atoms cross-linked
 * through patience.pos_in_other.  The total is returned via
 * *unique_in_both_count_p.
 * NOTE(review): the malloc-failure and loop-exit lines are elided in
 * this extract; error handling is presumed but not visible here. */
79 diff_atoms_mark_unique_in_both_by_qsort(struct diff_data *left,
80 struct diff_data *right,
81 unsigned int *unique_in_both_count_p)
/* One flat array holding pointers to every atom of both sides. */
85 struct diff_atom **all_atoms =
86 malloc((left->atoms.len + right->atoms.len)
87 * sizeof(struct diff_atom*));
90 unsigned int unique_in_both_count = 0;
94 diff_data_foreach_atom(a, left) {
97 diff_data_foreach_atom(b, right) {
101 rc = diff_atoms_qsort(all_atoms, len);
105 /* Now we have a sorted array of atom pointers. All similar lines are
106 * adjacent. Walk through the array and mark those that are unique on
107 * each side, but exist once in both sources. */
108 for (i = 0; i < len; i++) {
111 unsigned int count_first_side = 1;
112 unsigned int count_other_side = 0;
115 for (j = i+1; j < len; j++) {
117 rc = diff_atom_same(&same, a, b);
122 /* A following atom is the same. See on which side the
123 * repetition counts. */
124 if (a->root == b->root)
130 /* Counted a section of similar atoms, put the results back to
/* Exactly one occurrence on each side: a common-unique pair.
 * Cross-link the two atoms so either side can find its twin. */
132 if ((count_first_side == 1)
133 && (count_other_side == 1)) {
135 a->patience.unique_in_both = true;
136 a->patience.pos_in_other = b;
137 b->patience.unique_in_both = true;
138 b->patience.pos_in_other = a;
139 unique_in_both_count++;
142 *unique_in_both_count_p = unique_in_both_count;
/* For every common-unique line, grow a range of lines that are identical
 * on both sides around it ("swallowing" neighbors up and down), and store
 * that range in patience.identical_lines on both linked atoms.  Any other
 * common-unique line that falls inside a swallowed range is un-marked and
 * subtracted from *unique_in_both_count, since the enclosing range already
 * pins those lines.  l_min/r_min stop the upward scan from re-entering
 * ranges claimed by an earlier iteration.
 * NOTE(review): early-exit and error-return lines are elided in this
 * extract; diff_atom_same()'s return is checked in the original. */
150 diff_atoms_swallow_identical_neighbors(struct diff_data *left,
151 struct diff_data *right,
152 unsigned int *unique_in_both_count)
154 debug("trivially combine identical lines"
155 " around unique_in_both lines\n");
158 unsigned int next_l_idx;
159 unsigned int l_min = 0;
160 unsigned int r_min = 0;
161 for (l_idx = 0; l_idx < left->atoms.len; l_idx = next_l_idx) {
162 next_l_idx = l_idx + 1;
163 struct diff_atom *l = &left->atoms.head[l_idx];
/* Only common-unique lines seed a swallow range. */
165 if (!l->patience.unique_in_both)
168 debug("check identical lines around ");
169 debug_dump_atom(left, right, l);
/* Index of this line's twin on the right side. */
171 unsigned int r_idx = diff_atom_idx(right,
172 l->patience.pos_in_other);
174 struct diff_range identical_l;
175 struct diff_range identical_r;
178 * Each common-unique line swallows identical lines upwards and
180 * All common-unique lines that were part of the identical lines
181 * following below were already swallowed in the previous
182 * iteration, so we will never hit another common-unique line
/* Swallow upwards: extend start while the preceding lines on both
 * sides are identical and we have not crossed into an earlier range. */
184 for (identical_l.start = l_idx, identical_r.start = r_idx;
185 identical_l.start > l_min && identical_r.start > r_min;
186 identical_l.start--, identical_r.start--) {
188 int r = diff_atom_same(&same,
189 &left->atoms.head[identical_l.start - 1],
190 &right->atoms.head[identical_r.start - 1]);
197 /* Swallow downwards */
198 for (identical_l.end = l_idx + 1, identical_r.end = r_idx + 1;
199 identical_l.end < left->atoms.len
200 && identical_r.end < right->atoms.len;
201 identical_l.end++, identical_r.end++,
203 struct diff_atom *l_end;
204 struct diff_atom *r_end;
206 int r = diff_atom_same(&same,
207 &left->atoms.head[identical_l.end],
208 &right->atoms.head[identical_r.end]);
213 l_end = &left->atoms.head[identical_l.end];
214 r_end = &right->atoms.head[identical_r.end];
215 if (!l_end->patience.unique_in_both)
217 /* Part of a chunk of identical lines, remove from
218 * listing of unique_in_both lines */
219 l_end->patience.unique_in_both = false;
220 r_end->patience.unique_in_both = false;
221 (*unique_in_both_count)--;
/* Record the swallowed range on both linked atoms. */
224 l->patience.identical_lines = identical_l;
225 l->patience.pos_in_other->patience.identical_lines =
/* Later iterations must not scan back into this range. */
228 l_min = identical_l.end;
229 r_min = identical_r.end;
231 if (!diff_range_empty(&l->patience.identical_lines)) {
232 debug("common-unique line at l=%u r=%u swallowed"
233 " identical lines l=%u-%u r=%u-%u\n",
235 identical_l.start, identical_l.end,
236 identical_r.start, identical_r.end);
238 debug("next_l_idx = %u\n", next_l_idx);
243 /* binary search to find the stack to put this atom "card" on. */
/* Returns the index of the leftmost patience stack whose topmost card
 * has a pos_in_other >= this atom's pos_in_other (i.e. where this card
 * may be placed per patience-sort rules); returns patience_stacks_count
 * when a new stack must be opened on the right.  Comparison is on the
 * pos_in_other pointers, which reflect relative line order in 'right'
 * because atoms live in one contiguous array.
 * NOTE(review): the lo initialization, loop header and return are
 * elided in this extract. */
245 find_target_stack(struct diff_atom *atom,
246 struct diff_atom **patience_stacks,
247 unsigned int patience_stacks_count)
250 unsigned int hi = patience_stacks_count;
252 unsigned int mid = (lo + hi) >> 1;
254 if (patience_stacks[mid]->patience.pos_in_other
255 < atom->patience.pos_in_other)
263 /* Among the lines that appear exactly once in each side, find the longest
264 * streak that appear in both files in the same order (with other stuff allowed
265 * to interleave). Use patience sort for that, as in the Patience Diff
267 * See https://bramcohen.livejournal.com/73318.html and, for a much more
268 * detailed explanation,
269 * https://blog.jcoglan.com/2017/09/19/the-patience-diff-algorithm/ */
/* Main entry point of the Patience Diff algorithm.  Phases:
 *   1. mark common-unique lines (diff_atoms_mark_unique_in_both_by_qsort);
 *      if none exist, return DIFF_RC_USE_DIFF_ALGO_FALLBACK so the caller
 *      runs the configured fallback algorithm instead;
 *   2. swallow identical neighbors around each common-unique line;
 *   3. patience-sort the common-unique atoms to obtain the LCS;
 *   4. walk the LCS, emitting equal chunks for the matched ranges and
 *      handing the gaps between them back to the caller as unsolved,
 *      minus, or plus chunks via diff_state_add_chunk().
 * NOTE(review): many lines (declarations, error checks, closing braces)
 * are elided in this extract; comments below annotate only what is
 * visible. */
271 diff_algo_patience(const struct diff_algo_config *algo_config,
272 struct diff_state *state)
276 struct diff_data *left = &state->left;
277 struct diff_data *right = &state->right;
279 unsigned int unique_in_both_count;
281 debug("\n** %s\n", __func__);
283 /* Find those lines that appear exactly once in 'left' and exactly once
285 rc = diff_atoms_mark_unique_in_both_by_qsort(left, right,
286 &unique_in_both_count);
290 debug("unique_in_both_count %u\n", unique_in_both_count);
296 if (!unique_in_both_count) {
297 /* Cannot apply Patience, tell the caller to use fallback_algo
299 return DIFF_RC_USE_DIFF_ALGO_FALLBACK;
302 rc = diff_atoms_swallow_identical_neighbors(left, right,
303 &unique_in_both_count);
306 debug("After swallowing identical neighbors: unique_in_both = %u\n",
307 unique_in_both_count);
311 /* An array of Longest Common Sequence is the result of the below
313 unsigned int lcs_count = 0;
314 struct diff_atom **lcs = NULL;
315 struct diff_atom *lcs_tail = NULL;
318 /* This subscope marks the lifetime of the atom_pointers
321 /* One chunk of storage for atom pointers */
/* recallocarray zeroes the new allocation and checks n*size for
 * overflow; the buffer is split in half below. */
322 struct diff_atom **atom_pointers;
323 atom_pointers = recallocarray(NULL, 0, unique_in_both_count * 2,
324 sizeof(struct diff_atom*));
326 /* Half for the list of atoms that still need to be put on
328 struct diff_atom **uniques = atom_pointers;
330 /* Half for the patience sort state's "card stacks" -- we
331 * remember only each stack's topmost "card" */
332 struct diff_atom **patience_stacks;
333 patience_stacks = atom_pointers + unique_in_both_count;
334 unsigned int patience_stacks_count = 0;
336 /* Take all common, unique items from 'left' ... */
338 struct diff_atom *atom;
339 struct diff_atom **uniques_end = uniques;
340 diff_data_foreach_atom(atom, left) {
341 if (!atom->patience.unique_in_both)
347 /* ...and sort them to the order found in 'right'.
348 * The idea is to find the leftmost stack that has a higher line
349 * number and add it to the stack's top.
350 * If there is no such stack, open a new one on the right. The
351 * line number is derived from the atom*, which are array items
352 * and hence reflect the relative position in the source file.
353 * So we got the common-uniques from 'left' and sort them
354 * according to atom->patience.pos_in_other. */
356 for (i = 0; i < unique_in_both_count; i++) {
358 unsigned int target_stack;
359 target_stack = find_target_stack(atom, patience_stacks,
360 patience_stacks_count);
361 assert(target_stack <= patience_stacks_count);
362 patience_stacks[target_stack] = atom;
363 if (target_stack == patience_stacks_count)
364 patience_stacks_count++;
366 /* Record a back reference to the next stack on the
367 * left, which will form the final longest sequence
369 atom->patience.prev_stack = target_stack ?
370 patience_stacks[target_stack - 1] : NULL;
374 /* backtrace through prev_stack references to form the final
375 * longest common sequence */
/* The LCS length equals the number of stacks; its last element is
 * the top card of the rightmost stack. */
376 lcs_tail = patience_stacks[patience_stacks_count - 1];
377 lcs_count = patience_stacks_count;
379 /* uniques and patience_stacks are no longer needed.
380 * Backpointers are in atom->patience.prev_stack */
/* Materialize the LCS into an array by walking prev_stack links
 * from the tail, filling the array back-to-front. */
384 lcs = recallocarray(NULL, 0, lcs_count, sizeof(struct diff_atom*));
385 struct diff_atom **lcs_backtrace_pos = &lcs[lcs_count - 1];
386 struct diff_atom *atom;
387 for (atom = lcs_tail; atom; atom = atom->patience.prev_stack, lcs_backtrace_pos--) {
388 assert(lcs_backtrace_pos >= lcs);
389 *lcs_backtrace_pos = atom;
394 debug("\npatience LCS:\n");
395 for (i = 0; i < lcs_count; i++) {
396 debug_dump_atom(left, right, lcs[i]);
401 /* TODO: For each common-unique line found (now listed in lcs), swallow
402 * lines upwards and downwards that are identical on each side. Requires
403 * a way to represent atoms being glued to adjacent atoms. */
405 debug("\ntraverse LCS, possibly recursing:\n");
407 /* Now we have pinned positions in both files at which it makes sense to
408 * divide the diff problem into smaller chunks. Go into the next round:
409 * look at each section in turn, trying to again find common-unique
410 * lines in those smaller sections. As soon as no more are found, the
411 * remaining smaller sections are solved by Myers. */
412 unsigned int left_pos = 0;
413 unsigned int right_pos = 0;
414 for (i = 0; i <= lcs_count; i++) {
415 struct diff_atom *atom;
416 struct diff_atom *atom_r;
417 unsigned int left_idx;
418 unsigned int right_idx;
422 atom_r = atom->patience.pos_in_other;
423 debug("lcs[%u] = left[%u] = right[%u]\n", i,
424 diff_atom_idx(left, atom), diff_atom_idx(right, atom_r));
/* The matched region may span several identical lines swallowed
 * earlier; use the recorded range, not just the atom's own index. */
425 left_idx = atom->patience.identical_lines.start;
426 right_idx = atom_r->patience.identical_lines.start;
427 debug(" identical lines l %u-%u r %u-%u\n",
428 atom->patience.identical_lines.start, atom->patience.identical_lines.end,
429 atom_r->patience.identical_lines.start, atom_r->patience.identical_lines.end);
/* Final extra iteration (i == lcs_count): no matching atom, the
 * "section before" runs to the end of both files. */
433 left_idx = left->atoms.len;
434 right_idx = right->atoms.len;
437 /* 'atom' now marks an atom that matches on both sides according
438 * to patience-diff (a common-unique identical atom in both
440 * Handle the section before and the atom itself; the section
441 * after will be handled by the next loop iteration -- note that
442 * i loops to last element + 1 ("i <= lcs_count"), so that there
443 * will be another final iteration to pick up the last remaining
444 * items after the last LCS atom.
445 * The sections before might also be empty on left and/or right.
446 * left_pos and right_pos mark the indexes of the first atoms
447 * that have not yet been handled in the previous loop
448 * iteration. left_idx and right_idx mark the indexes of the
449 * matching atom on left and right, respectively. */
451 debug("iteration %u left_pos %u left_idx %u"
452 " right_pos %u right_idx %u\n",
453 i, left_pos, left_idx, right_pos, right_idx);
455 /* Section before the matching atom */
456 struct diff_atom *left_atom = &left->atoms.head[left_pos];
457 unsigned int left_section_len = left_idx - left_pos;
459 struct diff_atom *right_atom = &(right->atoms.head[right_pos]);
460 unsigned int right_section_len = right_idx - right_pos;
462 if (left_section_len && right_section_len) {
463 /* Record an unsolved chunk, the caller will apply
464 * inner_algo() on this chunk. */
465 if (!diff_state_add_chunk(state, false,
466 left_atom, left_section_len,
470 } else if (left_section_len && !right_section_len) {
471 /* Only left atoms and none on the right, they form a
472 * "minus" chunk, then. */
473 if (!diff_state_add_chunk(state, true,
474 left_atom, left_section_len,
477 } else if (!left_section_len && right_section_len) {
478 /* No left atoms, only atoms on the right, they form a
479 * "plus" chunk, then. */
480 if (!diff_state_add_chunk(state, true,
482 right_atom, right_section_len))
485 /* else: left_section_len == 0 and right_section_len == 0, i.e.
488 /* The atom found to match on both sides forms a chunk of equals
489 * on each side. In the very last iteration of this loop, there
490 * is no matching atom, we were just cleaning out the remaining
494 ok = diff_state_add_chunk(state, true,
496 + atom->patience.identical_lines.start,
497 diff_range_len(&atom->patience.identical_lines),
499 + atom_r->patience.identical_lines.start,
500 diff_range_len(&atom_r->patience.identical_lines));
/* Advance past the swallowed identical range (or past the single
 * matched atom when no neighbors were swallowed). */
503 left_pos = atom->patience.identical_lines.end;
504 right_pos = atom_r->patience.identical_lines.end;
506 left_pos = left_idx + 1;
507 right_pos = right_idx + 1;
509 debug("end of iteration %u left_pos %u left_idx %u"
510 " right_pos %u right_idx %u\n",
511 i, left_pos, left_idx, right_pos, right_idx);
513 debug("** END %s\n", __func__);