Blame


1 3b0f3d61 2020-01-22 neels /* Generic infrastructure to implement various diff algorithms. */
2 3b0f3d61 2020-01-22 neels /*
3 3b0f3d61 2020-01-22 neels * Copyright (c) 2020 Neels Hofmeyr <neels@hofmeyr.de>
4 3b0f3d61 2020-01-22 neels *
5 3b0f3d61 2020-01-22 neels * Permission to use, copy, modify, and distribute this software for any
6 3b0f3d61 2020-01-22 neels * purpose with or without fee is hereby granted, provided that the above
7 3b0f3d61 2020-01-22 neels * copyright notice and this permission notice appear in all copies.
8 3b0f3d61 2020-01-22 neels *
9 3b0f3d61 2020-01-22 neels * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 3b0f3d61 2020-01-22 neels * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 3b0f3d61 2020-01-22 neels * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 3b0f3d61 2020-01-22 neels * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 3b0f3d61 2020-01-22 neels * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 3b0f3d61 2020-01-22 neels * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15 3b0f3d61 2020-01-22 neels * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16 3b0f3d61 2020-01-22 neels */
17 3b0f3d61 2020-01-22 neels
/* A contiguous span of atom (e.g. line) indexes.
 * NOTE(review): whether 'end' is inclusive or one-past-the-end is not
 * visible in this header — confirm against the implementation before
 * relying on it. */
struct diff_range {
	int start;
	int end;
};

/* List of all possible return codes of a diff invocation. */
#define DIFF_RC_USE_DIFF_ALGO_FALLBACK -1
#define DIFF_RC_OK 0
/* Any positive return values are errno values from sys/errno.h */
27 3b0f3d61 2020-01-22 neels
struct diff_atom;

/* For each file, there is a "root" struct diff_data referencing the entire
 * file, which the atoms are parsed from. In recursion of diff algorithm, there
 * may be "child" struct diff_data only referencing a subsection of the file,
 * re-using the atoms parsing. For "root" structs, atoms_allocated will be
 * nonzero, indicating that the array of atoms is owned by that struct. For
 * "child" structs, atoms_allocated == 0, to indicate that the struct is
 * referencing a subset of atoms. */
struct diff_data {
	FILE *f;		/* if root diff_data and not memory-mapped */
	off_t pos;		/* if not memory-mapped */
	const uint8_t *data;	/* if memory-mapped */
	off_t len;		/* total length of the underlying source */

	ARRAYLIST(struct diff_atom) atoms;	/* parsed atoms (e.g. lines) */
	struct diff_data *root;	/* points at the owning "root" struct */

	int diff_flags;		/* bitwise OR of DIFF_FLAG_* values below */
};

#define DIFF_FLAG_IGNORE_WHITESPACE 0x00000001

/* Release the resources held by diff_data; frees the atoms array only when
 * diff_data is a "root" struct (see ownership note above). */
void diff_data_free(struct diff_data *diff_data);
52 3b0f3d61 2020-01-22 neels
struct diff_chunk;
typedef ARRAYLIST(struct diff_chunk) diff_chunk_arraylist_t;

/* Outcome of a diff invocation: check 'rc' first (DIFF_RC_OK, or a positive
 * errno value — see the DIFF_RC_* comment above) before using 'chunks'. */
struct diff_result {
	int rc;			/* DIFF_RC_OK on success, else errno value */
	struct diff_data left;
	struct diff_data right;
	diff_chunk_arraylist_t chunks;	/* the resulting diff, in order */
};

struct diff_state;
64 3b0f3d61 2020-01-22 neels
/* Signature of a utility function to divide both source files into diff atoms.
 * It is possible that a (future) algorithm requires both source files to decide
 * on atom split points, hence this gets both left and right to atomize at the
 * same time.
 * An example is diff_atomize_text_by_line() in diff_atomize_text.c.
 *
 * func_data: context pointer (free to be used by implementation).
 * left: struct diff_data with left->data and left->len already set up, and
 *       left->atoms to be created.
 * right: struct diff_data with right->data and right->len already set up, and
 *        right->atoms to be created.
 * Returns DIFF_RC_OK, or a positive errno value on failure (see the
 * DIFF_RC_* comment above). */
typedef int (*diff_atomize_func_t)(void *func_data,
				   struct diff_data *left,
				   struct diff_data *right);

/* Stock atomizer that splits both inputs into one atom per text line. */
extern int diff_atomize_text_by_line(void *func_data,
				     struct diff_data *left,
				     struct diff_data *right);
84 3b0f3d61 2020-01-22 neels
struct diff_algo_config;

/* Signature of a diff algorithm implementation. Returns DIFF_RC_OK,
 * DIFF_RC_USE_DIFF_ALGO_FALLBACK (to request algo_config->fallback_algo),
 * or a positive errno value. */
typedef int (*diff_algo_impl_t)(
	const struct diff_algo_config *algo_config, struct diff_state *state);

/* Form a result with all left-side removed and all right-side added, i.e. no
 * actual diff algorithm involved. */
int diff_algo_none(const struct diff_algo_config *algo_config,
		   struct diff_state *state);

/* Myers Diff tracing from the start all the way through to the end, requiring
 * quadratic amounts of memory. This can fail if the required space surpasses
 * algo_config->permitted_state_size. */
extern int diff_algo_myers(const struct diff_algo_config *algo_config,
			   struct diff_state *state);

/* Myers "Divide et Impera": tracing forwards from the start and backwards from
 * the end to find a midpoint that divides the problem into smaller chunks.
 * Requires only linear amounts of memory. */
extern int diff_algo_myers_divide(
	const struct diff_algo_config *algo_config, struct diff_state *state);

/* Patience Diff algorithm, which divides a larger diff into smaller chunks. For
 * very specific scenarios, it may lead to a complete diff result by itself, but
 * needs a fallback algo to solve chunks that don't have common-unique atoms. */
extern int diff_algo_patience(
	const struct diff_algo_config *algo_config, struct diff_state *state);
111 3b0f3d61 2020-01-22 neels
/* Diff algorithms to use, possibly nested. For example:
 *
 * struct diff_algo_config myers, patience, myers_divide;
 *
 * myers = (struct diff_algo_config){
 *	.impl = diff_algo_myers,
 *	.permitted_state_size = 32 * 1024 * 1024,
 *	// When too large, do diff_algo_patience:
 *	.fallback_algo = &patience,
 * };
 *
 * patience = (struct diff_algo_config){
 *	.impl = diff_algo_patience,
 *	// After subdivision, do Patience again:
 *	.inner_algo = &patience,
 *	// If subdivision failed, do Myers Divide et Impera:
 *	.fallback_algo = &myers_divide,
 * };
 *
 * myers_divide = (struct diff_algo_config){
 *	.impl = diff_algo_myers_divide,
 *	// When division succeeded, start from the top:
 *	.inner_algo = &myers,
 *	// (fallback_algo = NULL implies diff_algo_none).
 * };
 * struct diff_config config = {
 *	.algo = &myers,
 *	...
 * };
 * diff_main(&config, ...);
 */
struct diff_algo_config {
	diff_algo_impl_t impl;

	/* Fail this algo if it would use more than this amount of memory, and
	 * instead use fallback_algo (diff_algo_myers). permitted_state_size ==
	 * 0 means no limitation. */
	size_t permitted_state_size;

	/* For algorithms that divide into smaller chunks, use this algorithm to
	 * solve the divided chunks. */
	const struct diff_algo_config *inner_algo;

	/* If the algorithm fails (e.g. diff_algo_myers_if_small needs too large
	 * state, or diff_algo_patience can't find any common-unique atoms),
	 * then use this algorithm instead. */
	const struct diff_algo_config *fallback_algo;
};
160 3b0f3d61 2020-01-22 neels
/* Top-level configuration for diff_main(): how to atomize the inputs and
 * which (possibly nested) algorithm configuration to run. */
struct diff_config {
	diff_atomize_func_t atomize_func;
	void *atomize_func_data;	/* opaque, passed through to atomize_func */

	const struct diff_algo_config *algo;

	/* How deep to step into subdivisions of a source file, a paranoia /
	 * safety measure to guard against infinite loops through diff
	 * algorithms. When the maximum recursion is reached, employ
	 * diff_algo_none (i.e. remove all left atoms and add all right atoms).
	 */
	unsigned int max_recursion_depth;
};

/* Run the configured diff on the two inputs. Each side is given either as an
 * open FILE or as an in-memory buffer (see the struct diff_data field
 * comments above); diff_flags is a bitwise OR of DIFF_FLAG_* values.
 * NOTE(review): the returned result appears to be heap-allocated and owned by
 * the caller, to be released with diff_result_free() — confirm in the
 * implementation. Check result->rc before using the chunks. */
struct diff_result *diff_main(const struct diff_config *config,
			      FILE *left_f, const uint8_t *left_data,
			      off_t left_len,
			      FILE *right_f, const uint8_t *right_data,
			      off_t right_len, int diff_flags);

/* Free a result obtained from diff_main(). */
void diff_result_free(struct diff_result *result);