@@ -1,440 +1,524 | |||
|
1 | 1 | # This file is automatically @generated by Cargo. |
|
2 | 2 | # It is not intended for manual editing. |
|
3 | 3 | [[package]] |
|
4 | 4 | name = "aho-corasick" |
|
5 | 5 | version = "0.7.6" |
|
6 | 6 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
7 | 7 | dependencies = [ |
|
8 | 8 | "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
9 | 9 | ] |
|
10 | 10 | |
|
11 | 11 | [[package]] |
|
12 | 12 | name = "arrayvec" |
|
13 | 13 | version = "0.4.12" |
|
14 | 14 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
15 | 15 | dependencies = [ |
|
16 | 16 | "nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", |
|
17 | 17 | ] |
|
18 | 18 | |
|
19 | 19 | [[package]] |
|
20 | 20 | name = "autocfg" |
|
21 | 21 | version = "0.1.6" |
|
22 | 22 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
23 | 23 | |
|
24 | 24 | [[package]] |
|
25 | 25 | name = "bitflags" |
|
26 | 26 | version = "1.2.1" |
|
27 | 27 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
28 | 28 | |
|
29 | 29 | [[package]] |
|
30 | 30 | name = "byteorder" |
|
31 | 31 | version = "1.3.2" |
|
32 | 32 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
33 | 33 | |
|
34 | 34 | [[package]] |
|
35 | name = "c2-chacha" | |
|
36 | version = "0.2.2" | |
|
37 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
|
38 | dependencies = [ | |
|
39 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
40 | "ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
41 | ] | |
|
42 | ||
|
43 | [[package]] | |
|
35 | 44 | name = "cfg-if" |
|
36 | 45 | version = "0.1.10" |
|
37 | 46 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
38 | 47 | |
|
39 | 48 | [[package]] |
|
40 | 49 | name = "cloudabi" |
|
41 | 50 | version = "0.0.3" |
|
42 | 51 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
43 | 52 | dependencies = [ |
|
44 | 53 | "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
45 | 54 | ] |
|
46 | 55 | |
|
47 | 56 | [[package]] |
|
48 | 57 | name = "cpython" |
|
49 | 58 | version = "0.3.0" |
|
50 | 59 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
51 | 60 | dependencies = [ |
|
52 | 61 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", |
|
53 | 62 | "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", |
|
54 | 63 | "python27-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
55 | 64 | "python3-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
56 | 65 | ] |
|
57 | 66 | |
|
58 | 67 | [[package]] |
|
59 | 68 | name = "crossbeam-deque" |
|
60 | 69 | version = "0.7.1" |
|
61 | 70 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
62 | 71 | dependencies = [ |
|
63 | 72 | "crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
64 | 73 | "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
65 | 74 | ] |
|
66 | 75 | |
|
67 | 76 | [[package]] |
|
68 | 77 | name = "crossbeam-epoch" |
|
69 | 78 | version = "0.7.2" |
|
70 | 79 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
71 | 80 | dependencies = [ |
|
72 | 81 | "arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", |
|
73 | 82 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", |
|
74 | 83 | "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
75 | 84 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
76 | 85 | "memoffset 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
77 | 86 | "scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
78 | 87 | ] |
|
79 | 88 | |
|
80 | 89 | [[package]] |
|
81 | 90 | name = "crossbeam-queue" |
|
82 | 91 | version = "0.1.2" |
|
83 | 92 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
84 | 93 | dependencies = [ |
|
85 | 94 | "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
86 | 95 | ] |
|
87 | 96 | |
|
88 | 97 | [[package]] |
|
89 | 98 | name = "crossbeam-utils" |
|
90 | 99 | version = "0.6.6" |
|
91 | 100 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
92 | 101 | dependencies = [ |
|
93 | 102 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", |
|
94 | 103 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
95 | 104 | ] |
|
96 | 105 | |
|
97 | 106 | [[package]] |
|
98 | 107 | name = "either" |
|
99 | 108 | version = "1.5.3" |
|
100 | 109 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
101 | 110 | |
|
102 | 111 | [[package]] |
|
103 | 112 | name = "fuchsia-cprng" |
|
104 | 113 | version = "0.1.1" |
|
105 | 114 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
106 | 115 | |
|
107 | 116 | [[package]] |
|
117 | name = "getrandom" | |
|
118 | version = "0.1.12" | |
|
119 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
|
120 | dependencies = [ | |
|
121 | "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
122 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
123 | "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
124 | ] | |
|
125 | ||
|
126 | [[package]] | |
|
108 | 127 | name = "hg-core" |
|
109 | 128 | version = "0.1.0" |
|
110 | 129 | dependencies = [ |
|
111 | 130 | "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
112 | 131 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
113 | 132 | "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
114 | 133 | "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", |
|
115 | 134 | "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
116 | 135 | "rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
117 | 136 | "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
137 | "twox-hash 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
118 | 138 | ] |
|
119 | 139 | |
|
120 | 140 | [[package]] |
|
121 | 141 | name = "hg-cpython" |
|
122 | 142 | version = "0.1.0" |
|
123 | 143 | dependencies = [ |
|
124 | 144 | "cpython 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
125 | 145 | "hg-core 0.1.0", |
|
126 | 146 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", |
|
127 | 147 | ] |
|
128 | 148 | |
|
129 | 149 | [[package]] |
|
130 | 150 | name = "hgdirectffi" |
|
131 | 151 | version = "0.1.0" |
|
132 | 152 | dependencies = [ |
|
133 | 153 | "hg-core 0.1.0", |
|
134 | 154 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", |
|
135 | 155 | ] |
|
136 | 156 | |
|
137 | 157 | [[package]] |
|
138 | 158 | name = "lazy_static" |
|
139 | 159 | version = "1.4.0" |
|
140 | 160 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
141 | 161 | |
|
142 | 162 | [[package]] |
|
143 | 163 | name = "libc" |
|
144 | 164 | version = "0.2.64" |
|
145 | 165 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
146 | 166 | |
|
147 | 167 | [[package]] |
|
148 | 168 | name = "memchr" |
|
149 | 169 | version = "2.2.1" |
|
150 | 170 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
151 | 171 | |
|
152 | 172 | [[package]] |
|
153 | 173 | name = "memoffset" |
|
154 | 174 | version = "0.5.1" |
|
155 | 175 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
156 | 176 | dependencies = [ |
|
157 | 177 | "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", |
|
158 | 178 | ] |
|
159 | 179 | |
|
160 | 180 | [[package]] |
|
161 | 181 | name = "nodrop" |
|
162 | 182 | version = "0.1.14" |
|
163 | 183 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
164 | 184 | |
|
165 | 185 | [[package]] |
|
166 | 186 | name = "num-traits" |
|
167 | 187 | version = "0.2.8" |
|
168 | 188 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
169 | 189 | dependencies = [ |
|
170 | 190 | "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
171 | 191 | ] |
|
172 | 192 | |
|
173 | 193 | [[package]] |
|
174 | 194 | name = "num_cpus" |
|
175 | 195 | version = "1.10.1" |
|
176 | 196 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
177 | 197 | dependencies = [ |
|
178 | 198 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", |
|
179 | 199 | ] |
|
180 | 200 | |
|
181 | 201 | [[package]] |
|
202 | name = "ppv-lite86" | |
|
203 | version = "0.2.5" | |
|
204 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
|
205 | ||
|
206 | [[package]] | |
|
182 | 207 | name = "python27-sys" |
|
183 | 208 | version = "0.3.0" |
|
184 | 209 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
185 | 210 | dependencies = [ |
|
186 | 211 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", |
|
187 | 212 | "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
188 | 213 | ] |
|
189 | 214 | |
|
190 | 215 | [[package]] |
|
191 | 216 | name = "python3-sys" |
|
192 | 217 | version = "0.3.0" |
|
193 | 218 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
194 | 219 | dependencies = [ |
|
195 | 220 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", |
|
196 | 221 | "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
197 | 222 | ] |
|
198 | 223 | |
|
199 | 224 | [[package]] |
|
200 | 225 | name = "rand" |
|
201 | 226 | version = "0.6.5" |
|
202 | 227 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
203 | 228 | dependencies = [ |
|
204 | 229 | "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
205 | 230 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", |
|
206 | 231 | "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
207 | 232 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
208 | 233 | "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
209 | 234 | "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
210 | 235 | "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", |
|
211 | 236 | "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", |
|
212 | 237 | "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
213 | 238 | "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
214 | 239 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", |
|
215 | 240 | ] |
|
216 | 241 | |
|
217 | 242 | [[package]] |
|
243 | name = "rand" | |
|
244 | version = "0.7.2" | |
|
245 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
|
246 | dependencies = [ | |
|
247 | "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
248 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
249 | "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
250 | "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
251 | "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
252 | ] | |
|
253 | ||
|
254 | [[package]] | |
|
218 | 255 | name = "rand_chacha" |
|
219 | 256 | version = "0.1.1" |
|
220 | 257 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
221 | 258 | dependencies = [ |
|
222 | 259 | "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
223 | 260 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
224 | 261 | ] |
|
225 | 262 | |
|
226 | 263 | [[package]] |
|
264 | name = "rand_chacha" | |
|
265 | version = "0.2.1" | |
|
266 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
|
267 | dependencies = [ | |
|
268 | "c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
269 | "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
270 | ] | |
|
271 | ||
|
272 | [[package]] | |
|
227 | 273 | name = "rand_core" |
|
228 | 274 | version = "0.3.1" |
|
229 | 275 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
230 | 276 | dependencies = [ |
|
231 | 277 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
232 | 278 | ] |
|
233 | 279 | |
|
234 | 280 | [[package]] |
|
235 | 281 | name = "rand_core" |
|
236 | 282 | version = "0.4.2" |
|
237 | 283 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
238 | 284 | |
|
239 | 285 | [[package]] |
|
286 | name = "rand_core" | |
|
287 | version = "0.5.1" | |
|
288 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
|
289 | dependencies = [ | |
|
290 | "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
291 | ] | |
|
292 | ||
|
293 | [[package]] | |
|
240 | 294 | name = "rand_hc" |
|
241 | 295 | version = "0.1.0" |
|
242 | 296 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
243 | 297 | dependencies = [ |
|
244 | 298 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
245 | 299 | ] |
|
246 | 300 | |
|
247 | 301 | [[package]] |
|
302 | name = "rand_hc" | |
|
303 | version = "0.2.0" | |
|
304 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
|
305 | dependencies = [ | |
|
306 | "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
307 | ] | |
|
308 | ||
|
309 | [[package]] | |
|
248 | 310 | name = "rand_isaac" |
|
249 | 311 | version = "0.1.1" |
|
250 | 312 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
251 | 313 | dependencies = [ |
|
252 | 314 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
253 | 315 | ] |
|
254 | 316 | |
|
255 | 317 | [[package]] |
|
256 | 318 | name = "rand_jitter" |
|
257 | 319 | version = "0.1.4" |
|
258 | 320 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
259 | 321 | dependencies = [ |
|
260 | 322 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", |
|
261 | 323 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
262 | 324 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", |
|
263 | 325 | ] |
|
264 | 326 | |
|
265 | 327 | [[package]] |
|
266 | 328 | name = "rand_os" |
|
267 | 329 | version = "0.1.3" |
|
268 | 330 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
269 | 331 | dependencies = [ |
|
270 | 332 | "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", |
|
271 | 333 | "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
272 | 334 | "libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)", |
|
273 | 335 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
274 | 336 | "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
275 | 337 | "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", |
|
276 | 338 | ] |
|
277 | 339 | |
|
278 | 340 | [[package]] |
|
279 | 341 | name = "rand_pcg" |
|
280 | 342 | version = "0.1.2" |
|
281 | 343 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
282 | 344 | dependencies = [ |
|
283 | 345 | "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
284 | 346 | "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
285 | 347 | ] |
|
286 | 348 | |
|
287 | 349 | [[package]] |
|
288 | 350 | name = "rand_xorshift" |
|
289 | 351 | version = "0.1.1" |
|
290 | 352 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
291 | 353 | dependencies = [ |
|
292 | 354 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
293 | 355 | ] |
|
294 | 356 | |
|
295 | 357 | [[package]] |
|
296 | 358 | name = "rayon" |
|
297 | 359 | version = "1.2.0" |
|
298 | 360 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
299 | 361 | dependencies = [ |
|
300 | 362 | "crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
301 | 363 | "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", |
|
302 | 364 | "rayon-core 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
303 | 365 | ] |
|
304 | 366 | |
|
305 | 367 | [[package]] |
|
306 | 368 | name = "rayon-core" |
|
307 | 369 | version = "1.6.0" |
|
308 | 370 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
309 | 371 | dependencies = [ |
|
310 | 372 | "crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
311 | 373 | "crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", |
|
312 | 374 | "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
313 | 375 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
314 | 376 | "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
315 | 377 | ] |
|
316 | 378 | |
|
317 | 379 | [[package]] |
|
318 | 380 | name = "rdrand" |
|
319 | 381 | version = "0.4.0" |
|
320 | 382 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
321 | 383 | dependencies = [ |
|
322 | 384 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
323 | 385 | ] |
|
324 | 386 | |
|
325 | 387 | [[package]] |
|
326 | 388 | name = "regex" |
|
327 | 389 | version = "1.3.1" |
|
328 | 390 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
329 | 391 | dependencies = [ |
|
330 | 392 | "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
331 | 393 | "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", |
|
332 | 394 | "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", |
|
333 | 395 | "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", |
|
334 | 396 | ] |
|
335 | 397 | |
|
336 | 398 | [[package]] |
|
337 | 399 | name = "regex-syntax" |
|
338 | 400 | version = "0.6.12" |
|
339 | 401 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
340 | 402 | |
|
341 | 403 | [[package]] |
|
342 | 404 | name = "rustc_version" |
|
343 | 405 | version = "0.2.3" |
|
344 | 406 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
345 | 407 | dependencies = [ |
|
346 | 408 | "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
347 | 409 | ] |
|
348 | 410 | |
|
349 | 411 | [[package]] |
|
350 | 412 | name = "scopeguard" |
|
351 | 413 | version = "1.0.0" |
|
352 | 414 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
353 | 415 | |
|
354 | 416 | [[package]] |
|
355 | 417 | name = "semver" |
|
356 | 418 | version = "0.9.0" |
|
357 | 419 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
358 | 420 | dependencies = [ |
|
359 | 421 | "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
360 | 422 | ] |
|
361 | 423 | |
|
362 | 424 | [[package]] |
|
363 | 425 | name = "semver-parser" |
|
364 | 426 | version = "0.7.0" |
|
365 | 427 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
366 | 428 | |
|
367 | 429 | [[package]] |
|
368 | 430 | name = "thread_local" |
|
369 | 431 | version = "0.3.6" |
|
370 | 432 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
371 | 433 | dependencies = [ |
|
372 | 434 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
373 | 435 | ] |
|
374 | 436 | |
|
375 | 437 | [[package]] |
|
438 | name = "twox-hash" | |
|
439 | version = "1.5.0" | |
|
440 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
|
441 | dependencies = [ | |
|
442 | "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", | |
|
443 | ] | |
|
444 | ||
|
445 | [[package]] | |
|
446 | name = "wasi" | |
|
447 | version = "0.7.0" | |
|
448 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
|
449 | ||
|
450 | [[package]] | |
|
376 | 451 | name = "winapi" |
|
377 | 452 | version = "0.3.8" |
|
378 | 453 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
379 | 454 | dependencies = [ |
|
380 | 455 | "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
381 | 456 | "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", |
|
382 | 457 | ] |
|
383 | 458 | |
|
384 | 459 | [[package]] |
|
385 | 460 | name = "winapi-i686-pc-windows-gnu" |
|
386 | 461 | version = "0.4.0" |
|
387 | 462 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
388 | 463 | |
|
389 | 464 | [[package]] |
|
390 | 465 | name = "winapi-x86_64-pc-windows-gnu" |
|
391 | 466 | version = "0.4.0" |
|
392 | 467 | source = "registry+https://github.com/rust-lang/crates.io-index" |
|
393 | 468 | |
|
394 | 469 | [metadata] |
|
395 | 470 | "checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d" |
|
396 | 471 | "checksum arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" |
|
397 | 472 | "checksum autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875" |
|
398 | 473 | "checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" |
|
399 | 474 | "checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" |
|
475 | "checksum c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7d64d04786e0f528460fc884753cf8dddcc466be308f6026f8e355c41a0e4101" | |
|
400 | 476 | "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" |
|
401 | 477 | "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" |
|
402 | 478 | "checksum cpython 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "85532c648315aeb0829ad216a6a29aa3212cf9319bc7f6daf1404aa0bdd1485f" |
|
403 | 479 | "checksum crossbeam-deque 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b18cd2e169ad86297e6bc0ad9aa679aee9daa4f19e8163860faf7c164e4f5a71" |
|
404 | 480 | "checksum crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fedcd6772e37f3da2a9af9bf12ebe046c0dfe657992377b4df982a2b54cd37a9" |
|
405 | 481 | "checksum crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b" |
|
406 | 482 | "checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6" |
|
407 | 483 | "checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" |
|
408 | 484 | "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" |
|
485 | "checksum getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "473a1265acc8ff1e808cd0a1af8cee3c2ee5200916058a2ca113c29f2d903571" | |
|
409 | 486 | "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" |
|
410 | 487 | "checksum libc 0.2.64 (registry+https://github.com/rust-lang/crates.io-index)" = "74dfca3d9957906e8d1e6a0b641dc9a59848e793f1da2165889fd4f62d10d79c" |
|
411 | 488 | "checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e" |
|
412 | 489 | "checksum memoffset 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ce6075db033bbbb7ee5a0bbd3a3186bbae616f57fb001c485c7ff77955f8177f" |
|
413 | 490 | "checksum nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" |
|
414 | 491 | "checksum num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "6ba9a427cfca2be13aa6f6403b0b7e7368fe982bfa16fccc450ce74c46cd9b32" |
|
415 | 492 | "checksum num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bcef43580c035376c0705c42792c294b66974abbfd2789b511784023f71f3273" |
|
493 | "checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b" | |
|
416 | 494 | "checksum python27-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "372555e88a6bc8109eb641380240dc8d25a128fc48363ec9075664daadffdd5b" |
|
417 | 495 | "checksum python3-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f3a8ebed3f1201fda179f3960609dbbc10cd8c75e9f2afcb03788278f367d8ea" |
|
418 | 496 | "checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" |
|
497 | "checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412" | |
|
419 | 498 | "checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" |
|
499 | "checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" | |
|
420 | 500 | "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" |
|
421 | 501 | "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" |
|
502 | "checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" | |
|
422 | 503 | "checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" |
|
504 | "checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" | |
|
423 | 505 | "checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" |
|
424 | 506 | "checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" |
|
425 | 507 | "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" |
|
426 | 508 | "checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" |
|
427 | 509 | "checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" |
|
428 | 510 | "checksum rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "83a27732a533a1be0a0035a111fe76db89ad312f6f0347004c220c57f209a123" |
|
429 | 511 | "checksum rayon-core 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "98dcf634205083b17d0861252431eb2acbfb698ab7478a2d20de07954f47ec7b" |
|
430 | 512 | "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" |
|
431 | 513 | "checksum regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd" |
|
432 | 514 | "checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716" |
|
433 | 515 | "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" |
|
434 | 516 | "checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" |
|
435 | 517 | "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" |
|
436 | 518 | "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" |
|
437 | 519 | "checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" |
|
520 | "checksum twox-hash 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bfd5b7557925ce778ff9b9ef90e3ade34c524b5ff10e239c69a42d546d2af56" | |
|
521 | "checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d" | |
|
438 | 522 | "checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" |
|
439 | 523 | "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" |
|
440 | 524 | "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" |
@@ -1,18 +1,19 | |||
|
1 | 1 | [package] |
|
2 | 2 | name = "hg-core" |
|
3 | 3 | version = "0.1.0" |
|
4 | 4 | authors = ["Georges Racinet <gracinet@anybox.fr>"] |
|
5 | 5 | description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)" |
|
6 | 6 | edition = "2018" |
|
7 | 7 | |
|
8 | 8 | [lib] |
|
9 | 9 | name = "hg" |
|
10 | 10 | |
|
11 | 11 | [dependencies] |
|
12 | 12 | byteorder = "1.3.1" |
|
13 | 13 | lazy_static = "1.3.0" |
|
14 | 14 | memchr = "2.2.0" |
|
15 | 15 | rand = "0.6.5" |
|
16 | 16 | rand_pcg = "0.1.1" |
|
17 | rayon = "1.2.0" | |
|
17 | 18 | regex = "1.1.0" |
|
18 | rayon = "1.2.0" | |
|
19 | twox-hash = "1.5.0" |
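The new `twox-hash` dependency backs the `FastHashMap` alias that the Rust hunks below switch to; the alias itself is defined outside the hunks shown here. A minimal sketch of what such an alias can look like, assuming it only swaps std's default SipHash hasher for twox-hash's XxHash64:

```rust
// Hypothetical sketch, not the actual hg-core definition: a HashMap alias
// keyed to twox-hash's XxHash64 hasher instead of the default SipHash.
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
use twox_hash::XxHash64;

pub type FastHashMap<K, V> = HashMap<K, V, BuildHasherDefault<XxHash64>>;

// Construction mirrors the hunks below: `FastHashMap::default()` replaces
// `HashMap::new()`, because `new()` is only available with the default hasher.
fn example() -> FastHashMap<String, u32> {
    let mut map = FastHashMap::default();
    map.insert("rust/hg-core".to_owned(), 1);
    map
}
```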
@@ -1,82 +1,81 | |||
|
1 | 1 | // dirstate module |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | use crate::{utils::hg_path::HgPathBuf, DirstateParseError}; | |
|
8 | use crate::{utils::hg_path::HgPathBuf, DirstateParseError, FastHashMap}; | |
|
9 | 9 | use std::collections::hash_map; |
|
10 | use std::collections::HashMap; | |
|
11 | 10 | use std::convert::TryFrom; |
|
12 | 11 | |
|
13 | 12 | pub mod dirs_multiset; |
|
14 | 13 | pub mod dirstate_map; |
|
15 | 14 | pub mod parsers; |
|
16 | 15 | pub mod status; |
|
17 | 16 | |
|
18 | 17 | #[derive(Debug, PartialEq, Clone)] |
|
19 | 18 | pub struct DirstateParents { |
|
20 | 19 | pub p1: [u8; 20], |
|
21 | 20 | pub p2: [u8; 20], |
|
22 | 21 | } |
|
23 | 22 | |
|
24 | 23 | /// The C implementation uses all signed types. This will be an issue |
|
25 | 24 | /// either when 4GB+ source files are commonplace or in 2038, whichever |
|
26 | 25 | /// comes first. |
|
27 | 26 | #[derive(Debug, PartialEq, Copy, Clone)] |
|
28 | 27 | pub struct DirstateEntry { |
|
29 | 28 | pub state: EntryState, |
|
30 | 29 | pub mode: i32, |
|
31 | 30 | pub mtime: i32, |
|
32 | 31 | pub size: i32, |
|
33 | 32 | } |
|
34 | 33 | |
|
35 | 34 | /// A `DirstateEntry` with a size of `-2` means that it was merged from the |
|
36 | 35 | /// other parent. This allows revert to pick the right status back during a |
|
37 | 36 | /// merge. |
|
38 | 37 | pub const SIZE_FROM_OTHER_PARENT: i32 = -2; |
|
39 | 38 | |
|
40 | pub type StateMap = HashMap<HgPathBuf, DirstateEntry>; | |
|
39 | pub type StateMap = FastHashMap<HgPathBuf, DirstateEntry>; | |
|
41 | 40 | pub type StateMapIter<'a> = hash_map::Iter<'a, HgPathBuf, DirstateEntry>; |
|
42 | pub type CopyMap = HashMap<HgPathBuf, HgPathBuf>; | |
|
41 | pub type CopyMap = FastHashMap<HgPathBuf, HgPathBuf>; | |
|
43 | 42 | pub type CopyMapIter<'a> = hash_map::Iter<'a, HgPathBuf, HgPathBuf>; |
|
44 | 43 | |
|
45 | 44 | #[derive(Copy, Clone, Debug, Eq, PartialEq)] |
|
46 | 45 | pub enum EntryState { |
|
47 | 46 | Normal, |
|
48 | 47 | Added, |
|
49 | 48 | Removed, |
|
50 | 49 | Merged, |
|
51 | 50 | Unknown, |
|
52 | 51 | } |
|
53 | 52 | |
|
54 | 53 | impl TryFrom<u8> for EntryState { |
|
55 | 54 | type Error = DirstateParseError; |
|
56 | 55 | |
|
57 | 56 | fn try_from(value: u8) -> Result<Self, Self::Error> { |
|
58 | 57 | match value { |
|
59 | 58 | b'n' => Ok(EntryState::Normal), |
|
60 | 59 | b'a' => Ok(EntryState::Added), |
|
61 | 60 | b'r' => Ok(EntryState::Removed), |
|
62 | 61 | b'm' => Ok(EntryState::Merged), |
|
63 | 62 | b'?' => Ok(EntryState::Unknown), |
|
64 | 63 | _ => Err(DirstateParseError::CorruptedEntry(format!( |
|
65 | 64 | "Incorrect entry state {}", |
|
66 | 65 | value |
|
67 | 66 | ))), |
|
68 | 67 | } |
|
69 | 68 | } |
|
70 | 69 | } |
|
71 | 70 | |
|
72 | 71 | impl Into<u8> for EntryState { |
|
73 | 72 | fn into(self) -> u8 { |
|
74 | 73 | match self { |
|
75 | 74 | EntryState::Normal => b'n', |
|
76 | 75 | EntryState::Added => b'a', |
|
77 | 76 | EntryState::Removed => b'r', |
|
78 | 77 | EntryState::Merged => b'm', |
|
79 | 78 | EntryState::Unknown => b'?', |
|
80 | 79 | } |
|
81 | 80 | } |
|
82 | 81 | } |
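The `TryFrom<u8>`/`Into<u8>` pair above maps each entry state to its single-byte dirstate marker (`n`, `a`, `r`, `m`, `?`). A short, hypothetical round-trip check, assuming the crate (named `hg` per the Cargo.toml `[lib]` section) exposes `dirstate::EntryState`:

```rust
// Hypothetical round-trip check for the conversions above; hg::dirstate::EntryState
// is assumed to be publicly reachable.
use std::convert::TryFrom;
use hg::dirstate::EntryState;

fn main() {
    let state = EntryState::try_from(b'r').expect("known state byte");
    assert_eq!(state, EntryState::Removed);
    let byte: u8 = state.into();
    assert_eq!(byte, b'r');
    // Any other byte is rejected with DirstateParseError::CorruptedEntry.
    assert!(EntryState::try_from(b'x').is_err());
}
```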
@@ -1,335 +1,334 | |||
|
1 | 1 | // dirs_multiset.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | //! A multiset of directory names. |
|
9 | 9 | //! |
|
10 | 10 | //! Used to counts the references to directories in a manifest or dirstate. |
|
11 | 11 | use crate::utils::hg_path::{HgPath, HgPathBuf}; |
|
12 | 12 | use crate::{ |
|
13 | 13 | dirstate::EntryState, utils::files, DirstateEntry, DirstateMapError, |
|
14 | FastHashMap, | |
|
14 | 15 | }; |
|
15 | 16 | use std::collections::hash_map::{self, Entry}; |
|
16 | use std::collections::HashMap; | |
|
17 | 17 | |
|
18 | 18 | // could be encapsulated if we care API stability more seriously |
|
19 | 19 | pub type DirsMultisetIter<'a> = hash_map::Keys<'a, HgPathBuf, u32>; |
|
20 | 20 | |
|
21 | 21 | #[derive(PartialEq, Debug)] |
|
22 | 22 | pub struct DirsMultiset { |
|
23 | inner: HashMap<HgPathBuf, u32>, | |
|
23 | inner: FastHashMap<HgPathBuf, u32>, | |
|
24 | 24 | } |
|
25 | 25 | |
|
26 | 26 | impl DirsMultiset { |
|
27 | 27 | /// Initializes the multiset from a dirstate. |
|
28 | 28 | /// |
|
29 | 29 | /// If `skip_state` is provided, skips dirstate entries with equal state. |
|
30 | 30 | pub fn from_dirstate( |
|
31 | vec: &HashMap<HgPathBuf, DirstateEntry>, | |
|
31 | vec: &FastHashMap<HgPathBuf, DirstateEntry>, | |
|
32 | 32 | skip_state: Option<EntryState>, |
|
33 | 33 | ) -> Self { |
|
34 | 34 | let mut multiset = DirsMultiset { |
|
35 |
35 | inner: HashMap::new(), | |
|
|
35 | inner: FastHashMap::default(), | |
|
36 | 36 | }; |
|
37 | 37 | |
|
38 | 38 | for (filename, DirstateEntry { state, .. }) in vec { |
|
39 | 39 | // This `if` is optimized out of the loop |
|
40 | 40 | if let Some(skip) = skip_state { |
|
41 | 41 | if skip != *state { |
|
42 | 42 | multiset.add_path(filename); |
|
43 | 43 | } |
|
44 | 44 | } else { |
|
45 | 45 | multiset.add_path(filename); |
|
46 | 46 | } |
|
47 | 47 | } |
|
48 | 48 | |
|
49 | 49 | multiset |
|
50 | 50 | } |
|
51 | 51 | |
|
52 | 52 | /// Initializes the multiset from a manifest. |
|
53 | 53 | pub fn from_manifest(vec: &Vec<HgPathBuf>) -> Self { |
|
54 | 54 | let mut multiset = DirsMultiset { |
|
55 |
55 | inner: HashMap::new(), | |
|
|
55 | inner: FastHashMap::default(), | |
|
56 | 56 | }; |
|
57 | 57 | |
|
58 | 58 | for filename in vec { |
|
59 | 59 | multiset.add_path(filename); |
|
60 | 60 | } |
|
61 | 61 | |
|
62 | 62 | multiset |
|
63 | 63 | } |
|
64 | 64 | |
|
65 | 65 | /// Increases the count of deepest directory contained in the path. |
|
66 | 66 | /// |
|
67 | 67 | /// If the directory is not yet in the map, adds its parents. |
|
68 | 68 | pub fn add_path(&mut self, path: &HgPath) -> Result<(), DirstateMapError> { |
|
69 | 69 | for subpath in files::find_dirs(path) { |
|
70 | 70 | if subpath.as_bytes().last() == Some(&b'/') { |
|
71 | 71 | // TODO Remove this once PathAuditor is certified |
|
72 | 72 | // as the only entrypoint for path data |
|
73 | 73 | return Err(DirstateMapError::ConsecutiveSlashes); |
|
74 | 74 | } |
|
75 | 75 | if let Some(val) = self.inner.get_mut(subpath) { |
|
76 | 76 | *val += 1; |
|
77 | 77 | break; |
|
78 | 78 | } |
|
79 | 79 | self.inner.insert(subpath.to_owned(), 1); |
|
80 | 80 | } |
|
81 | 81 | Ok(()) |
|
82 | 82 | } |
|
83 | 83 | |
|
84 | 84 | /// Decreases the count of deepest directory contained in the path. |
|
85 | 85 | /// |
|
86 | 86 | /// If it is the only reference, decreases all parents until one is |
|
87 | 87 | /// removed. |
|
88 | 88 | /// If the directory is not in the map, something horrible has happened. |
|
89 | 89 | pub fn delete_path( |
|
90 | 90 | &mut self, |
|
91 | 91 | path: &HgPath, |
|
92 | 92 | ) -> Result<(), DirstateMapError> { |
|
93 | 93 | for subpath in files::find_dirs(path) { |
|
94 | 94 | match self.inner.entry(subpath.to_owned()) { |
|
95 | 95 | Entry::Occupied(mut entry) => { |
|
96 | 96 | let val = entry.get().clone(); |
|
97 | 97 | if val > 1 { |
|
98 | 98 | entry.insert(val - 1); |
|
99 | 99 | break; |
|
100 | 100 | } |
|
101 | 101 | entry.remove(); |
|
102 | 102 | } |
|
103 | 103 | Entry::Vacant(_) => { |
|
104 | 104 | return Err(DirstateMapError::PathNotFound( |
|
105 | 105 | path.to_owned(), |
|
106 | 106 | )) |
|
107 | 107 | } |
|
108 | 108 | }; |
|
109 | 109 | } |
|
110 | 110 | |
|
111 | 111 | Ok(()) |
|
112 | 112 | } |
|
113 | 113 | |
|
114 | 114 | pub fn contains(&self, key: &HgPath) -> bool { |
|
115 | 115 | self.inner.contains_key(key) |
|
116 | 116 | } |
|
117 | 117 | |
|
118 | 118 | pub fn iter(&self) -> DirsMultisetIter { |
|
119 | 119 | self.inner.keys() |
|
120 | 120 | } |
|
121 | 121 | |
|
122 | 122 | pub fn len(&self) -> usize { |
|
123 | 123 | self.inner.len() |
|
124 | 124 | } |
|
125 | 125 | } |
|
126 | 126 | |
|
127 | 127 | #[cfg(test)] |
|
128 | 128 | mod tests { |
|
129 | 129 | use super::*; |
|
130 | use std::collections::HashMap; | |
|
131 | 130 | |
|
132 | 131 | #[test] |
|
133 | 132 | fn test_delete_path_path_not_found() { |
|
134 | 133 | let mut map = DirsMultiset::from_manifest(&vec![]); |
|
135 | 134 | let path = HgPathBuf::from_bytes(b"doesnotexist/"); |
|
136 | 135 | assert_eq!( |
|
137 | 136 | Err(DirstateMapError::PathNotFound(path.to_owned())), |
|
138 | 137 | map.delete_path(&path) |
|
139 | 138 | ); |
|
140 | 139 | } |
|
141 | 140 | |
|
142 | 141 | #[test] |
|
143 | 142 | fn test_delete_path_empty_path() { |
|
144 | 143 | let mut map = DirsMultiset::from_manifest(&vec![HgPathBuf::new()]); |
|
145 | 144 | let path = HgPath::new(b""); |
|
146 | 145 | assert_eq!(Ok(()), map.delete_path(path)); |
|
147 | 146 | assert_eq!( |
|
148 | 147 | Err(DirstateMapError::PathNotFound(path.to_owned())), |
|
149 | 148 | map.delete_path(path) |
|
150 | 149 | ); |
|
151 | 150 | } |
|
152 | 151 | |
|
153 | 152 | #[test] |
|
154 | 153 | fn test_delete_path_successful() { |
|
155 | 154 | let mut map = DirsMultiset { |
|
156 | 155 | inner: [("", 5), ("a", 3), ("a/b", 2), ("a/c", 1)] |
|
157 | 156 | .iter() |
|
158 | 157 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
159 | 158 | .collect(), |
|
160 | 159 | }; |
|
161 | 160 | |
|
162 | 161 | assert_eq!(Ok(()), map.delete_path(HgPath::new(b"a/b/"))); |
|
163 | 162 | eprintln!("{:?}", map); |
|
164 | 163 | assert_eq!(Ok(()), map.delete_path(HgPath::new(b"a/b/"))); |
|
165 | 164 | eprintln!("{:?}", map); |
|
166 | 165 | assert_eq!( |
|
167 | 166 | Err(DirstateMapError::PathNotFound(HgPathBuf::from_bytes( |
|
168 | 167 | b"a/b/" |
|
169 | 168 | ))), |
|
170 | 169 | map.delete_path(HgPath::new(b"a/b/")) |
|
171 | 170 | ); |
|
172 | 171 | |
|
173 | 172 | assert_eq!(2, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
174 | 173 | assert_eq!(1, *map.inner.get(HgPath::new(b"a/c")).unwrap()); |
|
175 | 174 | eprintln!("{:?}", map); |
|
176 | 175 | assert_eq!(Ok(()), map.delete_path(HgPath::new(b"a/"))); |
|
177 | 176 | eprintln!("{:?}", map); |
|
178 | 177 | |
|
179 | 178 | assert_eq!(Ok(()), map.delete_path(HgPath::new(b"a/c/"))); |
|
180 | 179 | assert_eq!( |
|
181 | 180 | Err(DirstateMapError::PathNotFound(HgPathBuf::from_bytes( |
|
182 | 181 | b"a/c/" |
|
183 | 182 | ))), |
|
184 | 183 | map.delete_path(HgPath::new(b"a/c/")) |
|
185 | 184 | ); |
|
186 | 185 | } |
|
187 | 186 | |
|
188 | 187 | #[test] |
|
189 | 188 | fn test_add_path_empty_path() { |
|
190 | 189 | let mut map = DirsMultiset::from_manifest(&vec![]); |
|
191 | 190 | let path = HgPath::new(b""); |
|
192 | 191 | map.add_path(path); |
|
193 | 192 | |
|
194 | 193 | assert_eq!(1, map.len()); |
|
195 | 194 | } |
|
196 | 195 | |
|
197 | 196 | #[test] |
|
198 | 197 | fn test_add_path_successful() { |
|
199 | 198 | let mut map = DirsMultiset::from_manifest(&vec![]); |
|
200 | 199 | |
|
201 | 200 | map.add_path(HgPath::new(b"a/")); |
|
202 | 201 | assert_eq!(1, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
203 | 202 | assert_eq!(1, *map.inner.get(HgPath::new(b"")).unwrap()); |
|
204 | 203 | assert_eq!(2, map.len()); |
|
205 | 204 | |
|
206 | 205 | // Non directory should be ignored |
|
207 | 206 | map.add_path(HgPath::new(b"a")); |
|
208 | 207 | assert_eq!(1, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
209 | 208 | assert_eq!(2, map.len()); |
|
210 | 209 | |
|
211 | 210 | // Non directory will still add its base |
|
212 | 211 | map.add_path(HgPath::new(b"a/b")); |
|
213 | 212 | assert_eq!(2, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
214 | 213 | assert_eq!(2, map.len()); |
|
215 | 214 | |
|
216 | 215 | // Duplicate path works |
|
217 | 216 | map.add_path(HgPath::new(b"a/")); |
|
218 | 217 | assert_eq!(3, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
219 | 218 | |
|
220 | 219 | // Nested dir adds to its base |
|
221 | 220 | map.add_path(HgPath::new(b"a/b/")); |
|
222 | 221 | assert_eq!(4, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
223 | 222 | assert_eq!(1, *map.inner.get(HgPath::new(b"a/b")).unwrap()); |
|
224 | 223 | |
|
225 | 224 | // but not its base's base, because it already existed |
|
226 | 225 | map.add_path(HgPath::new(b"a/b/c/")); |
|
227 | 226 | assert_eq!(4, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
228 | 227 | assert_eq!(2, *map.inner.get(HgPath::new(b"a/b")).unwrap()); |
|
229 | 228 | |
|
230 | 229 | map.add_path(HgPath::new(b"a/c/")); |
|
231 | 230 | assert_eq!(1, *map.inner.get(HgPath::new(b"a/c")).unwrap()); |
|
232 | 231 | |
|
233 | 232 | let expected = DirsMultiset { |
|
234 | 233 | inner: [("", 2), ("a", 5), ("a/b", 2), ("a/b/c", 1), ("a/c", 1)] |
|
235 | 234 | .iter() |
|
236 | 235 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
237 | 236 | .collect(), |
|
238 | 237 | }; |
|
239 | 238 | assert_eq!(map, expected); |
|
240 | 239 | } |
|
241 | 240 | |
|
242 | 241 | #[test] |
|
243 | 242 | fn test_dirsmultiset_new_empty() { |
|
244 | 243 | let new = DirsMultiset::from_manifest(&vec![]); |
|
245 | 244 | let expected = DirsMultiset { |
|
246 |
246 | inner: HashMap::new(), | |
|
|
245 | inner: FastHashMap::default(), | |
|
247 | 246 | }; |
|
248 | 247 | assert_eq!(expected, new); |
|
249 | 248 | |
|
250 |
250 | let new = DirsMultiset::from_dirstate(&HashMap::new(), None); | |
|
|
249 | let new = DirsMultiset::from_dirstate(&FastHashMap::default(), None); | |
|
251 | 250 | let expected = DirsMultiset { |
|
252 |
252 | inner: HashMap::new(), | |
|
|
251 | inner: FastHashMap::default(), | |
|
253 | 252 | }; |
|
254 | 253 | assert_eq!(expected, new); |
|
255 | 254 | } |
|
256 | 255 | |
|
257 | 256 | #[test] |
|
258 | 257 | fn test_dirsmultiset_new_no_skip() { |
|
259 | 258 | let input_vec = ["a/", "b/", "a/c", "a/d/"] |
|
260 | 259 | .iter() |
|
261 | 260 | .map(|e| HgPathBuf::from_bytes(e.as_bytes())) |
|
262 | 261 | .collect(); |
|
263 | 262 | let expected_inner = [("", 2), ("a", 3), ("b", 1), ("a/d", 1)] |
|
264 | 263 | .iter() |
|
265 | 264 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
266 | 265 | .collect(); |
|
267 | 266 | |
|
268 | 267 | let new = DirsMultiset::from_manifest(&input_vec); |
|
269 | 268 | let expected = DirsMultiset { |
|
270 | 269 | inner: expected_inner, |
|
271 | 270 | }; |
|
272 | 271 | assert_eq!(expected, new); |
|
273 | 272 | |
|
274 | 273 | let input_map = ["a/", "b/", "a/c", "a/d/"] |
|
275 | 274 | .iter() |
|
276 | 275 | .map(|f| { |
|
277 | 276 | ( |
|
278 | 277 | HgPathBuf::from_bytes(f.as_bytes()), |
|
279 | 278 | DirstateEntry { |
|
280 | 279 | state: EntryState::Normal, |
|
281 | 280 | mode: 0, |
|
282 | 281 | mtime: 0, |
|
283 | 282 | size: 0, |
|
284 | 283 | }, |
|
285 | 284 | ) |
|
286 | 285 | }) |
|
287 | 286 | .collect(); |
|
288 | 287 | let expected_inner = [("", 2), ("a", 3), ("b", 1), ("a/d", 1)] |
|
289 | 288 | .iter() |
|
290 | 289 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
291 | 290 | .collect(); |
|
292 | 291 | |
|
293 | 292 | let new = DirsMultiset::from_dirstate(&input_map, None); |
|
294 | 293 | let expected = DirsMultiset { |
|
295 | 294 | inner: expected_inner, |
|
296 | 295 | }; |
|
297 | 296 | assert_eq!(expected, new); |
|
298 | 297 | } |
|
299 | 298 | |
|
300 | 299 | #[test] |
|
301 | 300 | fn test_dirsmultiset_new_skip() { |
|
302 | 301 | let input_map = [ |
|
303 | 302 | ("a/", EntryState::Normal), |
|
304 | 303 | ("a/b/", EntryState::Normal), |
|
305 | 304 | ("a/c", EntryState::Removed), |
|
306 | 305 | ("a/d/", EntryState::Merged), |
|
307 | 306 | ] |
|
308 | 307 | .iter() |
|
309 | 308 | .map(|(f, state)| { |
|
310 | 309 | ( |
|
311 | 310 | HgPathBuf::from_bytes(f.as_bytes()), |
|
312 | 311 | DirstateEntry { |
|
313 | 312 | state: *state, |
|
314 | 313 | mode: 0, |
|
315 | 314 | mtime: 0, |
|
316 | 315 | size: 0, |
|
317 | 316 | }, |
|
318 | 317 | ) |
|
319 | 318 | }) |
|
320 | 319 | .collect(); |
|
321 | 320 | |
|
322 | 321 | // "a" incremented with "a/c" and "a/d/" |
|
323 | 322 | let expected_inner = [("", 1), ("a", 2), ("a/d", 1)] |
|
324 | 323 | .iter() |
|
325 | 324 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
326 | 325 | .collect(); |
|
327 | 326 | |
|
328 | 327 | let new = |
|
329 | 328 | DirsMultiset::from_dirstate(&input_map, Some(EntryState::Normal)); |
|
330 | 329 | let expected = DirsMultiset { |
|
331 | 330 | inner: expected_inner, |
|
332 | 331 | }; |
|
333 | 332 | assert_eq!(expected, new); |
|
334 | 333 | } |
|
335 | 334 | } |
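The tests above exercise the reference-counting behaviour documented on `add_path` and `delete_path`: adding a file bumps its deepest already-present parent directory and inserts any missing ancestors, while deleting walks back up until a positive count survives. A hypothetical stand-alone usage sketch, assuming `DirsMultiset` and the `hg_path` helpers are reachable from the `hg` crate root as the other hunks suggest:

```rust
// Hypothetical usage of the DirsMultiset API shown above.
use hg::utils::hg_path::{HgPath, HgPathBuf};
use hg::DirsMultiset;

fn main() {
    let manifest: Vec<HgPathBuf> = vec![
        HgPathBuf::from_bytes(b"a/b/file.rs"),
        HgPathBuf::from_bytes(b"a/other.rs"),
    ];
    // "a/b", "a" and "" are created for the first file; the second file only
    // bumps the already-present "a".
    let mut dirs = DirsMultiset::from_manifest(&manifest);
    assert!(dirs.contains(HgPath::new(b"a")));
    assert!(dirs.contains(HgPath::new(b"a/b")));
    // Removing the only file under "a/b" drops that directory entry.
    dirs.delete_path(HgPath::new(b"a/b/file.rs")).unwrap();
    assert!(!dirs.contains(HgPath::new(b"a/b")));
}
```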
@@ -1,427 +1,427 | |||
|
1 | 1 | // dirstate_map.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | use crate::{ |
|
9 | 9 | dirstate::{parsers::PARENT_SIZE, EntryState, SIZE_FROM_OTHER_PARENT}, |
|
10 | 10 | pack_dirstate, parse_dirstate, |
|
11 | 11 | utils::{ |
|
12 | 12 | files::normalize_case, |
|
13 | 13 | hg_path::{HgPath, HgPathBuf}, |
|
14 | 14 | }, |
|
15 | 15 | CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateMapError, |
|
16 | DirstateParents, DirstateParseError, StateMap, | |
|
16 | DirstateParents, DirstateParseError, FastHashMap, StateMap, | |
|
17 | 17 | }; |
|
18 | 18 | use core::borrow::Borrow; |
|
19 |
19 | use std::collections::{HashMap, HashSet}; | |
|
|
19 | use std::collections::HashSet; | |
|
20 | 20 | use std::convert::TryInto; |
|
21 | 21 | use std::iter::FromIterator; |
|
22 | 22 | use std::ops::Deref; |
|
23 | 23 | use std::time::Duration; |
|
24 | 24 | |
|
25 | pub type FileFoldMap = HashMap<HgPathBuf, HgPathBuf>; | |
|
25 | pub type FileFoldMap = FastHashMap<HgPathBuf, HgPathBuf>; | |
|
26 | 26 | |
|
27 | 27 | const NULL_ID: [u8; 20] = [0; 20]; |
|
28 | 28 | const MTIME_UNSET: i32 = -1; |
|
29 | 29 | |
|
30 | 30 | #[derive(Default)] |
|
31 | 31 | pub struct DirstateMap { |
|
32 | 32 | state_map: StateMap, |
|
33 | 33 | pub copy_map: CopyMap, |
|
34 | 34 | file_fold_map: Option<FileFoldMap>, |
|
35 | 35 | pub dirs: Option<DirsMultiset>, |
|
36 | 36 | pub all_dirs: Option<DirsMultiset>, |
|
37 | 37 | non_normal_set: HashSet<HgPathBuf>, |
|
38 | 38 | other_parent_set: HashSet<HgPathBuf>, |
|
39 | 39 | parents: Option<DirstateParents>, |
|
40 | 40 | dirty_parents: bool, |
|
41 | 41 | } |
|
42 | 42 | |
|
43 | 43 | /// Should only really be used in python interface code, for clarity |
|
44 | 44 | impl Deref for DirstateMap { |
|
45 | 45 | type Target = StateMap; |
|
46 | 46 | |
|
47 | 47 | fn deref(&self) -> &Self::Target { |
|
48 | 48 | &self.state_map |
|
49 | 49 | } |
|
50 | 50 | } |
|
51 | 51 | |
|
52 | 52 | impl FromIterator<(HgPathBuf, DirstateEntry)> for DirstateMap { |
|
53 | 53 | fn from_iter<I: IntoIterator<Item = (HgPathBuf, DirstateEntry)>>( |
|
54 | 54 | iter: I, |
|
55 | 55 | ) -> Self { |
|
56 | 56 | Self { |
|
57 | 57 | state_map: iter.into_iter().collect(), |
|
58 | 58 | ..Self::default() |
|
59 | 59 | } |
|
60 | 60 | } |
|
61 | 61 | } |
|
62 | 62 | |
|
63 | 63 | impl DirstateMap { |
|
64 | 64 | pub fn new() -> Self { |
|
65 | 65 | Self::default() |
|
66 | 66 | } |
|
67 | 67 | |
|
68 | 68 | pub fn clear(&mut self) { |
|
69 | 69 | self.state_map.clear(); |
|
70 | 70 | self.copy_map.clear(); |
|
71 | 71 | self.file_fold_map = None; |
|
72 | 72 | self.non_normal_set.clear(); |
|
73 | 73 | self.other_parent_set.clear(); |
|
74 | 74 | self.set_parents(&DirstateParents { |
|
75 | 75 | p1: NULL_ID, |
|
76 | 76 | p2: NULL_ID, |
|
77 | 77 | }) |
|
78 | 78 | } |
|
79 | 79 | |
|
80 | 80 | /// Add a tracked file to the dirstate |
|
81 | 81 | pub fn add_file( |
|
82 | 82 | &mut self, |
|
83 | 83 | filename: &HgPath, |
|
84 | 84 | old_state: EntryState, |
|
85 | 85 | entry: DirstateEntry, |
|
86 | 86 | ) -> Result<(), DirstateMapError> { |
|
87 | 87 | if old_state == EntryState::Unknown || old_state == EntryState::Removed |
|
88 | 88 | { |
|
89 | 89 | if let Some(ref mut dirs) = self.dirs { |
|
90 | 90 | dirs.add_path(filename)?; |
|
91 | 91 | } |
|
92 | 92 | } |
|
93 | 93 | if old_state == EntryState::Unknown { |
|
94 | 94 | if let Some(ref mut all_dirs) = self.all_dirs { |
|
95 | 95 | all_dirs.add_path(filename)?; |
|
96 | 96 | } |
|
97 | 97 | } |
|
98 | 98 | self.state_map.insert(filename.to_owned(), entry.to_owned()); |
|
99 | 99 | |
|
100 | 100 | if entry.state != EntryState::Normal || entry.mtime == MTIME_UNSET { |
|
101 | 101 | self.non_normal_set.insert(filename.to_owned()); |
|
102 | 102 | } |
|
103 | 103 | |
|
104 | 104 | if entry.size == SIZE_FROM_OTHER_PARENT { |
|
105 | 105 | self.other_parent_set.insert(filename.to_owned()); |
|
106 | 106 | } |
|
107 | 107 | Ok(()) |
|
108 | 108 | } |
|
109 | 109 | |
|
110 | 110 | /// Mark a file as removed in the dirstate. |
|
111 | 111 | /// |
|
112 | 112 | /// The `size` parameter is used to store sentinel values that indicate |
|
113 | 113 | /// the file's previous state. In the future, we should refactor this |
|
114 | 114 | /// to be more explicit about what that state is. |
|
115 | 115 | pub fn remove_file( |
|
116 | 116 | &mut self, |
|
117 | 117 | filename: &HgPath, |
|
118 | 118 | old_state: EntryState, |
|
119 | 119 | size: i32, |
|
120 | 120 | ) -> Result<(), DirstateMapError> { |
|
121 | 121 | if old_state != EntryState::Unknown && old_state != EntryState::Removed |
|
122 | 122 | { |
|
123 | 123 | if let Some(ref mut dirs) = self.dirs { |
|
124 | 124 | dirs.delete_path(filename)?; |
|
125 | 125 | } |
|
126 | 126 | } |
|
127 | 127 | if old_state == EntryState::Unknown { |
|
128 | 128 | if let Some(ref mut all_dirs) = self.all_dirs { |
|
129 | 129 | all_dirs.add_path(filename); |
|
130 | 130 | } |
|
131 | 131 | } |
|
132 | 132 | |
|
133 | 133 | if let Some(ref mut file_fold_map) = self.file_fold_map { |
|
134 | 134 | file_fold_map.remove(&normalize_case(filename)); |
|
135 | 135 | } |
|
136 | 136 | self.state_map.insert( |
|
137 | 137 | filename.to_owned(), |
|
138 | 138 | DirstateEntry { |
|
139 | 139 | state: EntryState::Removed, |
|
140 | 140 | mode: 0, |
|
141 | 141 | size, |
|
142 | 142 | mtime: 0, |
|
143 | 143 | }, |
|
144 | 144 | ); |
|
145 | 145 | self.non_normal_set.insert(filename.to_owned()); |
|
146 | 146 | Ok(()) |
|
147 | 147 | } |
|
148 | 148 | |
|
149 | 149 | /// Remove a file from the dirstate. |
|
150 | 150 | /// Returns `true` if the file was previously recorded. |
|
151 | 151 | pub fn drop_file( |
|
152 | 152 | &mut self, |
|
153 | 153 | filename: &HgPath, |
|
154 | 154 | old_state: EntryState, |
|
155 | 155 | ) -> Result<bool, DirstateMapError> { |
|
156 | 156 | let exists = self.state_map.remove(filename).is_some(); |
|
157 | 157 | |
|
158 | 158 | if exists { |
|
159 | 159 | if old_state != EntryState::Removed { |
|
160 | 160 | if let Some(ref mut dirs) = self.dirs { |
|
161 | 161 | dirs.delete_path(filename)?; |
|
162 | 162 | } |
|
163 | 163 | } |
|
164 | 164 | if let Some(ref mut all_dirs) = self.all_dirs { |
|
165 | 165 | all_dirs.delete_path(filename)?; |
|
166 | 166 | } |
|
167 | 167 | } |
|
168 | 168 | if let Some(ref mut file_fold_map) = self.file_fold_map { |
|
169 | 169 | file_fold_map.remove(&normalize_case(filename)); |
|
170 | 170 | } |
|
171 | 171 | self.non_normal_set.remove(filename); |
|
172 | 172 | |
|
173 | 173 | Ok(exists) |
|
174 | 174 | } |
|
175 | 175 | |
|
176 | 176 | pub fn clear_ambiguous_times( |
|
177 | 177 | &mut self, |
|
178 | 178 | filenames: Vec<HgPathBuf>, |
|
179 | 179 | now: i32, |
|
180 | 180 | ) { |
|
181 | 181 | for filename in filenames { |
|
182 | 182 | let mut changed = false; |
|
183 | 183 | self.state_map |
|
184 | 184 | .entry(filename.to_owned()) |
|
185 | 185 | .and_modify(|entry| { |
|
186 | 186 | if entry.state == EntryState::Normal && entry.mtime == now |
|
187 | 187 | { |
|
188 | 188 | changed = true; |
|
189 | 189 | *entry = DirstateEntry { |
|
190 | 190 | mtime: MTIME_UNSET, |
|
191 | 191 | ..*entry |
|
192 | 192 | }; |
|
193 | 193 | } |
|
194 | 194 | }); |
|
195 | 195 | if changed { |
|
196 | 196 | self.non_normal_set.insert(filename.to_owned()); |
|
197 | 197 | } |
|
198 | 198 | } |
|
199 | 199 | } |
|
200 | 200 | |
|
201 | 201 | pub fn non_normal_other_parent_entries( |
|
202 | 202 | &self, |
|
203 | 203 | ) -> (HashSet<HgPathBuf>, HashSet<HgPathBuf>) { |
|
204 | 204 | let mut non_normal = HashSet::new(); |
|
205 | 205 | let mut other_parent = HashSet::new(); |
|
206 | 206 | |
|
207 | 207 | for ( |
|
208 | 208 | filename, |
|
209 | 209 | DirstateEntry { |
|
210 | 210 | state, size, mtime, .. |
|
211 | 211 | }, |
|
212 | 212 | ) in self.state_map.iter() |
|
213 | 213 | { |
|
214 | 214 | if *state != EntryState::Normal || *mtime == MTIME_UNSET { |
|
215 | 215 | non_normal.insert(filename.to_owned()); |
|
216 | 216 | } |
|
217 | 217 | if *state == EntryState::Normal && *size == SIZE_FROM_OTHER_PARENT |
|
218 | 218 | { |
|
219 | 219 | other_parent.insert(filename.to_owned()); |
|
220 | 220 | } |
|
221 | 221 | } |
|
222 | 222 | |
|
223 | 223 | (non_normal, other_parent) |
|
224 | 224 | } |
|
225 | 225 | |
|
226 | 226 | /// Both of these setters and their uses appear to be the simplest way to |

227 | 227 | /// emulate a Python lazy property, but they are ugly and unidiomatic. |

228 | 228 | /// TODO One day, rewriting this struct using the typestate pattern might be a |

229 | 229 | /// good idea. |
|
230 | 230 | pub fn set_all_dirs(&mut self) { |
|
231 | 231 | if self.all_dirs.is_none() { |
|
232 | 232 | self.all_dirs = |
|
233 | 233 | Some(DirsMultiset::from_dirstate(&self.state_map, None)); |
|
234 | 234 | } |
|
235 | 235 | } |
|
236 | 236 | |
|
237 | 237 | pub fn set_dirs(&mut self) { |
|
238 | 238 | if self.dirs.is_none() { |
|
239 | 239 | self.dirs = Some(DirsMultiset::from_dirstate( |
|
240 | 240 | &self.state_map, |
|
241 | 241 | Some(EntryState::Removed), |
|
242 | 242 | )); |
|
243 | 243 | } |
|
244 | 244 | } |
|
245 | 245 | |
|
246 | 246 | pub fn has_tracked_dir(&mut self, directory: &HgPath) -> bool { |
|
247 | 247 | self.set_dirs(); |
|
248 | 248 | self.dirs.as_ref().unwrap().contains(directory) |
|
249 | 249 | } |
|
250 | 250 | |
|
251 | 251 | pub fn has_dir(&mut self, directory: &HgPath) -> bool { |
|
252 | 252 | self.set_all_dirs(); |
|
253 | 253 | self.all_dirs.as_ref().unwrap().contains(directory) |
|
254 | 254 | } |
|
255 | 255 | |
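A more idiomatic shape for the lazy initialization described above would fold the check and the construction into one accessor via `Option::get_or_insert_with`. This is only a sketch of the alternative the TODO hints at; the method name `dirs_multiset` is hypothetical and not part of this change:

    pub fn dirs_multiset(&mut self) -> &DirsMultiset {
        // Split the borrows so the closure does not capture `self` itself.
        let state_map = &self.state_map;
        self.dirs.get_or_insert_with(|| {
            // Same construction as `set_dirs` above.
            DirsMultiset::from_dirstate(state_map, Some(EntryState::Removed))
        })
    }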
|
256 | 256 | pub fn parents( |
|
257 | 257 | &mut self, |
|
258 | 258 | file_contents: &[u8], |
|
259 | 259 | ) -> Result<&DirstateParents, DirstateError> { |
|
260 | 260 | if let Some(ref parents) = self.parents { |
|
261 | 261 | return Ok(parents); |
|
262 | 262 | } |
|
263 | 263 | let parents; |
|
264 | 264 | if file_contents.len() == PARENT_SIZE * 2 { |
|
265 | 265 | parents = DirstateParents { |
|
266 | 266 | p1: file_contents[..PARENT_SIZE].try_into().unwrap(), |
|
267 | 267 | p2: file_contents[PARENT_SIZE..PARENT_SIZE * 2] |
|
268 | 268 | .try_into() |
|
269 | 269 | .unwrap(), |
|
270 | 270 | }; |
|
271 | 271 | } else if file_contents.is_empty() { |
|
272 | 272 | parents = DirstateParents { |
|
273 | 273 | p1: NULL_ID, |
|
274 | 274 | p2: NULL_ID, |
|
275 | 275 | }; |
|
276 | 276 | } else { |
|
277 | 277 | return Err(DirstateError::Parse(DirstateParseError::Damaged)); |
|
278 | 278 | } |
|
279 | 279 | |
|
280 | 280 | self.parents = Some(parents); |
|
281 | 281 | Ok(self.parents.as_ref().unwrap()) |
|
282 | 282 | } |
|
283 | 283 | |
|
284 | 284 | pub fn set_parents(&mut self, parents: &DirstateParents) { |
|
285 | 285 | self.parents = Some(parents.clone()); |
|
286 | 286 | self.dirty_parents = true; |
|
287 | 287 | } |
|
288 | 288 | |
|
289 | 289 | pub fn read( |
|
290 | 290 | &mut self, |
|
291 | 291 | file_contents: &[u8], |
|
292 | 292 | ) -> Result<Option<DirstateParents>, DirstateError> { |
|
293 | 293 | if file_contents.is_empty() { |
|
294 | 294 | return Ok(None); |
|
295 | 295 | } |
|
296 | 296 | |
|
297 | 297 | let parents = parse_dirstate( |
|
298 | 298 | &mut self.state_map, |
|
299 | 299 | &mut self.copy_map, |
|
300 | 300 | file_contents, |
|
301 | 301 | )?; |
|
302 | 302 | |
|
303 | 303 | if !self.dirty_parents { |
|
304 | 304 | self.set_parents(&parents); |
|
305 | 305 | } |
|
306 | 306 | |
|
307 | 307 | Ok(Some(parents)) |
|
308 | 308 | } |
|
309 | 309 | |
|
310 | 310 | pub fn pack( |
|
311 | 311 | &mut self, |
|
312 | 312 | parents: DirstateParents, |
|
313 | 313 | now: Duration, |
|
314 | 314 | ) -> Result<Vec<u8>, DirstateError> { |
|
315 | 315 | let packed = |
|
316 | 316 | pack_dirstate(&mut self.state_map, &self.copy_map, parents, now)?; |
|
317 | 317 | |
|
318 | 318 | self.dirty_parents = false; |
|
319 | 319 | |
|
320 | 320 | let result = self.non_normal_other_parent_entries(); |
|
321 | 321 | self.non_normal_set = result.0; |
|
322 | 322 | self.other_parent_set = result.1; |
|
323 | 323 | Ok(packed) |
|
324 | 324 | } |
|
325 | 325 | |
|
326 | 326 | pub fn build_file_fold_map(&mut self) -> &FileFoldMap { |
|
327 | 327 | if let Some(ref file_fold_map) = self.file_fold_map { |
|
328 | 328 | return file_fold_map; |
|
329 | 329 | } |
|
330 | let mut new_file_fold_map = FileFoldMap:: |

330 | let mut new_file_fold_map = FileFoldMap::default(); | |
|
331 | 331 | for (filename, DirstateEntry { state, .. }) in self.state_map.borrow() |
|
332 | 332 | { |
|
333 | 333 | if *state == EntryState::Removed { |
|
334 | 334 | new_file_fold_map |
|
335 | 335 | .insert(normalize_case(filename), filename.to_owned()); |
|
336 | 336 | } |
|
337 | 337 | } |
|
338 | 338 | self.file_fold_map = Some(new_file_fold_map); |
|
339 | 339 | self.file_fold_map.as_ref().unwrap() |
|
340 | 340 | } |
|
341 | 341 | } |
|
342 | 342 | |
|
343 | 343 | #[cfg(test)] |
|
344 | 344 | mod tests { |
|
345 | 345 | use super::*; |
|
346 | 346 | |
|
347 | 347 | #[test] |
|
348 | 348 | fn test_dirs_multiset() { |
|
349 | 349 | let mut map = DirstateMap::new(); |
|
350 | 350 | assert!(map.dirs.is_none()); |
|
351 | 351 | assert!(map.all_dirs.is_none()); |
|
352 | 352 | |
|
353 | 353 | assert_eq!(false, map.has_dir(HgPath::new(b"nope"))); |
|
354 | 354 | assert!(map.all_dirs.is_some()); |
|
355 | 355 | assert!(map.dirs.is_none()); |
|
356 | 356 | |
|
357 | 357 | assert_eq!(false, map.has_tracked_dir(HgPath::new(b"nope"))); |
|
358 | 358 | assert!(map.dirs.is_some()); |
|
359 | 359 | } |
|
360 | 360 | |
|
361 | 361 | #[test] |
|
362 | 362 | fn test_add_file() { |
|
363 | 363 | let mut map = DirstateMap::new(); |
|
364 | 364 | |
|
365 | 365 | assert_eq!(0, map.len()); |
|
366 | 366 | |
|
367 | 367 | map.add_file( |
|
368 | 368 | HgPath::new(b"meh"), |
|
369 | 369 | EntryState::Normal, |
|
370 | 370 | DirstateEntry { |
|
371 | 371 | state: EntryState::Normal, |
|
372 | 372 | mode: 1337, |
|
373 | 373 | mtime: 1337, |
|
374 | 374 | size: 1337, |
|
375 | 375 | }, |
|
376 | 376 | ); |
|
377 | 377 | |
|
378 | 378 | assert_eq!(1, map.len()); |
|
379 | 379 | assert_eq!(0, map.non_normal_set.len()); |
|
380 | 380 | assert_eq!(0, map.other_parent_set.len()); |
|
381 | 381 | } |
|
382 | 382 | |
|
383 | 383 | #[test] |
|
384 | 384 | fn test_non_normal_other_parent_entries() { |
|
385 | 385 | let map: DirstateMap = [ |
|
386 | 386 | (b"f1", (EntryState::Removed, 1337, 1337, 1337)), |
|
387 | 387 | (b"f2", (EntryState::Normal, 1337, 1337, -1)), |
|
388 | 388 | (b"f3", (EntryState::Normal, 1337, 1337, 1337)), |
|
389 | 389 | (b"f4", (EntryState::Normal, 1337, -2, 1337)), |
|
390 | 390 | (b"f5", (EntryState::Added, 1337, 1337, 1337)), |
|
391 | 391 | (b"f6", (EntryState::Added, 1337, 1337, -1)), |
|
392 | 392 | (b"f7", (EntryState::Merged, 1337, 1337, -1)), |
|
393 | 393 | (b"f8", (EntryState::Merged, 1337, 1337, 1337)), |
|
394 | 394 | (b"f9", (EntryState::Merged, 1337, -2, 1337)), |
|
395 | 395 | (b"fa", (EntryState::Added, 1337, -2, 1337)), |
|
396 | 396 | (b"fb", (EntryState::Removed, 1337, -2, 1337)), |
|
397 | 397 | ] |
|
398 | 398 | .iter() |
|
399 | 399 | .map(|(fname, (state, mode, size, mtime))| { |
|
400 | 400 | ( |
|
401 | 401 | HgPathBuf::from_bytes(fname.as_ref()), |
|
402 | 402 | DirstateEntry { |
|
403 | 403 | state: *state, |
|
404 | 404 | mode: *mode, |
|
405 | 405 | size: *size, |
|
406 | 406 | mtime: *mtime, |
|
407 | 407 | }, |
|
408 | 408 | ) |
|
409 | 409 | }) |
|
410 | 410 | .collect(); |
|
411 | 411 | |
|
412 | 412 | let non_normal = [ |
|
413 | 413 | b"f1", b"f2", b"f5", b"f6", b"f7", b"f8", b"f9", b"fa", b"fb", |
|
414 | 414 | ] |
|
415 | 415 | .iter() |
|
416 | 416 | .map(|x| HgPathBuf::from_bytes(x.as_ref())) |
|
417 | 417 | .collect(); |
|
418 | 418 | |
|
419 | 419 | let mut other_parent = HashSet::new(); |
|
420 | 420 | other_parent.insert(HgPathBuf::from_bytes(b"f4")); |
|
421 | 421 | |
|
422 | 422 | assert_eq!( |
|
423 | 423 | (non_normal, other_parent), |
|
424 | 424 | map.non_normal_other_parent_entries() |
|
425 | 425 | ); |
|
426 | 426 | } |
|
427 | 427 | } |
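The hunks in this and the following files swap `std::collections::HashMap` for a `FastHashMap` exported from the crate root. Its definition is not part of this diff; a minimal sketch of such an alias, assuming a fast non-cryptographic hasher such as the one provided by the `twox-hash` crate, could look like:

    use std::collections::HashMap;
    use twox_hash::RandomXxHashBuilder64;

    /// HashMap alias using a faster, non-cryptographic hasher (sketch; the
    /// actual hasher used by the crate is not shown in this diff).
    pub type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>;

Note that with a non-default hasher in the type, `HashMap::new()` is no longer available (it is only defined for `RandomState`), which is why the new code below constructs maps with `FastHashMap::default()`.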
@@ -1,434 +1,433 | |||
|
1 | 1 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
2 | 2 | // |
|
3 | 3 | // This software may be used and distributed according to the terms of the |
|
4 | 4 | // GNU General Public License version 2 or any later version. |
|
5 | 5 | |
|
6 | 6 | use crate::utils::hg_path::HgPath; |
|
7 | 7 | use crate::{ |
|
8 | 8 | dirstate::{CopyMap, EntryState, StateMap}, |
|
9 | 9 | DirstateEntry, DirstatePackError, DirstateParents, DirstateParseError, |
|
10 | 10 | }; |
|
11 | 11 | use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt}; |
|
12 | 12 | use std::convert::{TryFrom, TryInto}; |
|
13 | 13 | use std::io::Cursor; |
|
14 | 14 | use std::time::Duration; |
|
15 | 15 | |
|
16 | 16 | /// Parents are stored in the dirstate as byte hashes. |
|
17 | 17 | pub const PARENT_SIZE: usize = 20; |
|
18 | 18 | /// Dirstate entries have a static part of 8 + 32 + 32 + 32 + 32 bits. |
|
19 | 19 | const MIN_ENTRY_SIZE: usize = 17; |
|
20 | 20 | |
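As a quick check on the constant above: the fixed part of each entry is one state byte plus four big-endian 32-bit fields, i.e. (8 + 4 × 32) / 8 = 17 bytes, followed by the variable-length path. A throwaway test (sketch only) states the same thing:

    #[test]
    fn entry_header_size() {
        assert_eq!(MIN_ENTRY_SIZE, 1 + 4 * 4);
    }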
|
21 | 21 | // TODO parse/pack: is mutate-on-loop better for performance? |
|
22 | 22 | |
|
23 | 23 | pub fn parse_dirstate( |
|
24 | 24 | state_map: &mut StateMap, |
|
25 | 25 | copy_map: &mut CopyMap, |
|
26 | 26 | contents: &[u8], |
|
27 | 27 | ) -> Result<DirstateParents, DirstateParseError> { |
|
28 | 28 | if contents.len() < PARENT_SIZE * 2 { |
|
29 | 29 | return Err(DirstateParseError::TooLittleData); |
|
30 | 30 | } |
|
31 | 31 | |
|
32 | 32 | let mut curr_pos = PARENT_SIZE * 2; |
|
33 | 33 | let parents = DirstateParents { |
|
34 | 34 | p1: contents[..PARENT_SIZE].try_into().unwrap(), |
|
35 | 35 | p2: contents[PARENT_SIZE..curr_pos].try_into().unwrap(), |
|
36 | 36 | }; |
|
37 | 37 | |
|
38 | 38 | while curr_pos < contents.len() { |
|
39 | 39 | if curr_pos + MIN_ENTRY_SIZE > contents.len() { |
|
40 | 40 | return Err(DirstateParseError::Overflow); |
|
41 | 41 | } |
|
42 | 42 | let entry_bytes = &contents[curr_pos..]; |
|
43 | 43 | |
|
44 | 44 | let mut cursor = Cursor::new(entry_bytes); |
|
45 | 45 | let state = EntryState::try_from(cursor.read_u8()?)?; |
|
46 | 46 | let mode = cursor.read_i32::<BigEndian>()?; |
|
47 | 47 | let size = cursor.read_i32::<BigEndian>()?; |
|
48 | 48 | let mtime = cursor.read_i32::<BigEndian>()?; |
|
49 | 49 | let path_len = cursor.read_i32::<BigEndian>()? as usize; |
|
50 | 50 | |
|
51 | 51 | if path_len > contents.len() - curr_pos { |
|
52 | 52 | return Err(DirstateParseError::Overflow); |
|
53 | 53 | } |
|
54 | 54 | |
|
55 | 55 | // Slice instead of allocating the Vec that `read_exact` would require |
|
56 | 56 | let path = &entry_bytes[MIN_ENTRY_SIZE..MIN_ENTRY_SIZE + (path_len)]; |
|
57 | 57 | |
|
58 | 58 | let (path, copy) = match memchr::memchr(0, path) { |
|
59 | 59 | None => (path, None), |
|
60 | 60 | Some(i) => (&path[..i], Some(&path[(i + 1)..])), |
|
61 | 61 | }; |
|
62 | 62 | |
|
63 | 63 | if let Some(copy_path) = copy { |
|
64 | 64 | copy_map.insert( |
|
65 | 65 | HgPath::new(path).to_owned(), |
|
66 | 66 | HgPath::new(copy_path).to_owned(), |
|
67 | 67 | ); |
|
68 | 68 | }; |
|
69 | 69 | state_map.insert( |
|
70 | 70 | HgPath::new(path).to_owned(), |
|
71 | 71 | DirstateEntry { |
|
72 | 72 | state, |
|
73 | 73 | mode, |
|
74 | 74 | size, |
|
75 | 75 | mtime, |
|
76 | 76 | }, |
|
77 | 77 | ); |
|
78 | 78 | curr_pos = curr_pos + MIN_ENTRY_SIZE + (path_len); |
|
79 | 79 | } |
|
80 | 80 | |
|
81 | 81 | Ok(parents) |
|
82 | 82 | } |
|
83 | 83 | |
|
84 | 84 | /// `now` is the duration in seconds since the Unix epoch |
|
85 | 85 | pub fn pack_dirstate( |
|
86 | 86 | state_map: &mut StateMap, |
|
87 | 87 | copy_map: &CopyMap, |
|
88 | 88 | parents: DirstateParents, |
|
89 | 89 | now: Duration, |
|
90 | 90 | ) -> Result<Vec<u8>, DirstatePackError> { |
|
91 | 91 | // TODO move away from i32 before 2038. |
|
92 | 92 | let now: i32 = now.as_secs().try_into().expect("time overflow"); |
|
93 | 93 | |
|
94 | 94 | let expected_size: usize = state_map |
|
95 | 95 | .iter() |
|
96 | 96 | .map(|(filename, _)| { |
|
97 | 97 | let mut length = MIN_ENTRY_SIZE + filename.len(); |
|
98 | 98 | if let Some(copy) = copy_map.get(filename) { |
|
99 | 99 | length += copy.len() + 1; |
|
100 | 100 | } |
|
101 | 101 | length |
|
102 | 102 | }) |
|
103 | 103 | .sum(); |
|
104 | 104 | let expected_size = expected_size + PARENT_SIZE * 2; |
|
105 | 105 | |
|
106 | 106 | let mut packed = Vec::with_capacity(expected_size); |
|
107 | 107 | let mut new_state_map = vec![]; |
|
108 | 108 | |
|
109 | 109 | packed.extend(&parents.p1); |
|
110 | 110 | packed.extend(&parents.p2); |
|
111 | 111 | |
|
112 | 112 | for (filename, entry) in state_map.iter() { |
|
113 | 113 | let new_filename = filename.to_owned(); |
|
114 | 114 | let mut new_mtime: i32 = entry.mtime; |
|
115 | 115 | if entry.state == EntryState::Normal && entry.mtime == now { |
|
116 | 116 | // The file was last modified "simultaneously" with the current |
|
117 | 117 | // write to dirstate (i.e. within the same second for file- |
|
118 | 118 | // systems with a granularity of 1 sec). This commonly happens |
|
119 | 119 | // for at least a couple of files on 'update'. |
|
120 | 120 | // The user could change the file without changing its size |
|
121 | 121 | // within the same second. Invalidate the file's mtime in |
|
122 | 122 | // dirstate, forcing future 'status' calls to compare the |
|
123 | 123 | // contents of the file if the size is the same. This prevents |
|
124 | 124 | // mistakenly treating such files as clean. |
|
125 | 125 | new_mtime = -1; |
|
126 | 126 | new_state_map.push(( |
|
127 | 127 | filename.to_owned(), |
|
128 | 128 | DirstateEntry { |
|
129 | 129 | mtime: new_mtime, |
|
130 | 130 | ..*entry |
|
131 | 131 | }, |
|
132 | 132 | )); |
|
133 | 133 | } |
|
134 | 134 | let mut new_filename = new_filename.into_vec(); |
|
135 | 135 | if let Some(copy) = copy_map.get(filename) { |
|
136 | 136 | new_filename.push('\0' as u8); |
|
137 | 137 | new_filename.extend(copy.bytes()); |
|
138 | 138 | } |
|
139 | 139 | |
|
140 | 140 | packed.write_u8(entry.state.into())?; |
|
141 | 141 | packed.write_i32::<BigEndian>(entry.mode)?; |
|
142 | 142 | packed.write_i32::<BigEndian>(entry.size)?; |
|
143 | 143 | packed.write_i32::<BigEndian>(new_mtime)?; |
|
144 | 144 | packed.write_i32::<BigEndian>(new_filename.len() as i32)?; |
|
145 | 145 | packed.extend(new_filename) |
|
146 | 146 | } |
|
147 | 147 | |
|
148 | 148 | if packed.len() != expected_size { |
|
149 | 149 | return Err(DirstatePackError::BadSize(expected_size, packed.len())); |
|
150 | 150 | } |
|
151 | 151 | |
|
152 | 152 | state_map.extend(new_state_map); |
|
153 | 153 | |
|
154 | 154 | Ok(packed) |
|
155 | 155 | } |
|
156 | 156 | |
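The mtime-invalidation rule explained in the comment inside the packing loop above boils down to a small predicate. A standalone sketch (the helper name is hypothetical, not part of the diff):

    /// Mtime value to write out for `entry`, given the current time in seconds.
    fn packed_mtime(entry: &DirstateEntry, now: i32) -> i32 {
        if entry.state == EntryState::Normal && entry.mtime == now {
            // Written within the same second as `now`: force later `status`
            // calls to compare file contents instead of trusting the mtime.
            -1
        } else {
            entry.mtime
        }
    }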
|
157 | 157 | #[cfg(test)] |
|
158 | 158 | mod tests { |
|
159 | 159 | use super::*; |
|
160 | use crate::utils::hg_path::HgPathBuf; | |
|
161 | use std::collections::HashMap; | |
|
160 | use crate::{utils::hg_path::HgPathBuf, FastHashMap}; | |
|
162 | 161 | |
|
163 | 162 | #[test] |
|
164 | 163 | fn test_pack_dirstate_empty() { |
|
165 | let mut state_map: StateMap = HashMap:: |

166 | let copymap = HashMap:: |

164 | let mut state_map: StateMap = FastHashMap::default(); | |

165 | let copymap = FastHashMap::default(); | |
|
167 | 166 | let parents = DirstateParents { |
|
168 | 167 | p1: *b"12345678910111213141", |
|
169 | 168 | p2: *b"00000000000000000000", |
|
170 | 169 | }; |
|
171 | 170 | let now = Duration::new(15000000, 0); |
|
172 | 171 | let expected = b"1234567891011121314100000000000000000000".to_vec(); |
|
173 | 172 | |
|
174 | 173 | assert_eq!( |
|
175 | 174 | expected, |
|
176 | 175 | pack_dirstate(&mut state_map, ©map, parents, now).unwrap() |
|
177 | 176 | ); |
|
178 | 177 | |
|
179 | 178 | assert!(state_map.is_empty()) |
|
180 | 179 | } |
|
181 | 180 | #[test] |
|
182 | 181 | fn test_pack_dirstate_one_entry() { |
|
183 | 182 | let expected_state_map: StateMap = [( |
|
184 | 183 | HgPathBuf::from_bytes(b"f1"), |
|
185 | 184 | DirstateEntry { |
|
186 | 185 | state: EntryState::Normal, |
|
187 | 186 | mode: 0o644, |
|
188 | 187 | size: 0, |
|
189 | 188 | mtime: 791231220, |
|
190 | 189 | }, |
|
191 | 190 | )] |
|
192 | 191 | .iter() |
|
193 | 192 | .cloned() |
|
194 | 193 | .collect(); |
|
195 | 194 | let mut state_map = expected_state_map.clone(); |
|
196 | 195 | |
|
197 | let copymap = HashMap:: |

196 | let copymap = FastHashMap::default(); | |
|
198 | 197 | let parents = DirstateParents { |
|
199 | 198 | p1: *b"12345678910111213141", |
|
200 | 199 | p2: *b"00000000000000000000", |
|
201 | 200 | }; |
|
202 | 201 | let now = Duration::new(15000000, 0); |
|
203 | 202 | let expected = [ |
|
204 | 203 | 49, 50, 51, 52, 53, 54, 55, 56, 57, 49, 48, 49, 49, 49, 50, 49, |
|
205 | 204 | 51, 49, 52, 49, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, |
|
206 | 205 | 48, 48, 48, 48, 48, 48, 48, 48, 110, 0, 0, 1, 164, 0, 0, 0, 0, 47, |
|
207 | 206 | 41, 58, 244, 0, 0, 0, 2, 102, 49, |
|
208 | 207 | ] |
|
209 | 208 | .to_vec(); |
|
210 | 209 | |
|
211 | 210 | assert_eq!( |
|
212 | 211 | expected, |
|
213 | 212 | pack_dirstate(&mut state_map, ©map, parents, now).unwrap() |
|
214 | 213 | ); |
|
215 | 214 | |
|
216 | 215 | assert_eq!(expected_state_map, state_map); |
|
217 | 216 | } |
|
218 | 217 | #[test] |
|
219 | 218 | fn test_pack_dirstate_one_entry_with_copy() { |
|
220 | 219 | let expected_state_map: StateMap = [( |
|
221 | 220 | HgPathBuf::from_bytes(b"f1"), |
|
222 | 221 | DirstateEntry { |
|
223 | 222 | state: EntryState::Normal, |
|
224 | 223 | mode: 0o644, |
|
225 | 224 | size: 0, |
|
226 | 225 | mtime: 791231220, |
|
227 | 226 | }, |
|
228 | 227 | )] |
|
229 | 228 | .iter() |
|
230 | 229 | .cloned() |
|
231 | 230 | .collect(); |
|
232 | 231 | let mut state_map = expected_state_map.clone(); |
|
233 | let mut copymap = HashMap:: |

232 | let mut copymap = FastHashMap::default(); | |
|
234 | 233 | copymap.insert( |
|
235 | 234 | HgPathBuf::from_bytes(b"f1"), |
|
236 | 235 | HgPathBuf::from_bytes(b"copyname"), |
|
237 | 236 | ); |
|
238 | 237 | let parents = DirstateParents { |
|
239 | 238 | p1: *b"12345678910111213141", |
|
240 | 239 | p2: *b"00000000000000000000", |
|
241 | 240 | }; |
|
242 | 241 | let now = Duration::new(15000000, 0); |
|
243 | 242 | let expected = [ |
|
244 | 243 | 49, 50, 51, 52, 53, 54, 55, 56, 57, 49, 48, 49, 49, 49, 50, 49, |
|
245 | 244 | 51, 49, 52, 49, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, |
|
246 | 245 | 48, 48, 48, 48, 48, 48, 48, 48, 110, 0, 0, 1, 164, 0, 0, 0, 0, 47, |
|
247 | 246 | 41, 58, 244, 0, 0, 0, 11, 102, 49, 0, 99, 111, 112, 121, 110, 97, |
|
248 | 247 | 109, 101, |
|
249 | 248 | ] |
|
250 | 249 | .to_vec(); |
|
251 | 250 | |
|
252 | 251 | assert_eq!( |
|
253 | 252 | expected, |
|
254 | 253 | pack_dirstate(&mut state_map, ©map, parents, now).unwrap() |
|
255 | 254 | ); |
|
256 | 255 | assert_eq!(expected_state_map, state_map); |
|
257 | 256 | } |
|
258 | 257 | |
|
259 | 258 | #[test] |
|
260 | 259 | fn test_parse_pack_one_entry_with_copy() { |
|
261 | 260 | let mut state_map: StateMap = [( |
|
262 | 261 | HgPathBuf::from_bytes(b"f1"), |
|
263 | 262 | DirstateEntry { |
|
264 | 263 | state: EntryState::Normal, |
|
265 | 264 | mode: 0o644, |
|
266 | 265 | size: 0, |
|
267 | 266 | mtime: 791231220, |
|
268 | 267 | }, |
|
269 | 268 | )] |
|
270 | 269 | .iter() |
|
271 | 270 | .cloned() |
|
272 | 271 | .collect(); |
|
273 | let mut copymap = HashMap:: |

272 | let mut copymap = FastHashMap::default(); | |
|
274 | 273 | copymap.insert( |
|
275 | 274 | HgPathBuf::from_bytes(b"f1"), |
|
276 | 275 | HgPathBuf::from_bytes(b"copyname"), |
|
277 | 276 | ); |
|
278 | 277 | let parents = DirstateParents { |
|
279 | 278 | p1: *b"12345678910111213141", |
|
280 | 279 | p2: *b"00000000000000000000", |
|
281 | 280 | }; |
|
282 | 281 | let now = Duration::new(15000000, 0); |
|
283 | 282 | let result = |
|
284 | 283 | pack_dirstate(&mut state_map, ©map, parents.clone(), now) |
|
285 | 284 | .unwrap(); |
|
286 | 285 | |
|
287 | let mut new_state_map: StateMap = HashMap:: |

288 | let mut new_copy_map: CopyMap = HashMap:: |

286 | let mut new_state_map: StateMap = FastHashMap::default(); | |

287 | let mut new_copy_map: CopyMap = FastHashMap::default(); | |
|
289 | 288 | let new_parents = parse_dirstate( |
|
290 | 289 | &mut new_state_map, |
|
291 | 290 | &mut new_copy_map, |
|
292 | 291 | result.as_slice(), |
|
293 | 292 | ) |
|
294 | 293 | .unwrap(); |
|
295 | 294 | assert_eq!( |
|
296 | 295 | (parents, state_map, copymap), |
|
297 | 296 | (new_parents, new_state_map, new_copy_map) |
|
298 | 297 | ) |
|
299 | 298 | } |
|
300 | 299 | |
|
301 | 300 | #[test] |
|
302 | 301 | fn test_parse_pack_multiple_entries_with_copy() { |
|
303 | 302 | let mut state_map: StateMap = [ |
|
304 | 303 | ( |
|
305 | 304 | HgPathBuf::from_bytes(b"f1"), |
|
306 | 305 | DirstateEntry { |
|
307 | 306 | state: EntryState::Normal, |
|
308 | 307 | mode: 0o644, |
|
309 | 308 | size: 0, |
|
310 | 309 | mtime: 791231220, |
|
311 | 310 | }, |
|
312 | 311 | ), |
|
313 | 312 | ( |
|
314 | 313 | HgPathBuf::from_bytes(b"f2"), |
|
315 | 314 | DirstateEntry { |
|
316 | 315 | state: EntryState::Merged, |
|
317 | 316 | mode: 0o777, |
|
318 | 317 | size: 1000, |
|
319 | 318 | mtime: 791231220, |
|
320 | 319 | }, |
|
321 | 320 | ), |
|
322 | 321 | ( |
|
323 | 322 | HgPathBuf::from_bytes(b"f3"), |
|
324 | 323 | DirstateEntry { |
|
325 | 324 | state: EntryState::Removed, |
|
326 | 325 | mode: 0o644, |
|
327 | 326 | size: 234553, |
|
328 | 327 | mtime: 791231220, |
|
329 | 328 | }, |
|
330 | 329 | ), |
|
331 | 330 | ( |
|
332 | 331 | HgPathBuf::from_bytes(b"f4\xF6"), |
|
333 | 332 | DirstateEntry { |
|
334 | 333 | state: EntryState::Added, |
|
335 | 334 | mode: 0o644, |
|
336 | 335 | size: -1, |
|
337 | 336 | mtime: -1, |
|
338 | 337 | }, |
|
339 | 338 | ), |
|
340 | 339 | ] |
|
341 | 340 | .iter() |
|
342 | 341 | .cloned() |
|
343 | 342 | .collect(); |
|
344 | let mut copymap = HashMap:: |

343 | let mut copymap = FastHashMap::default(); | |
|
345 | 344 | copymap.insert( |
|
346 | 345 | HgPathBuf::from_bytes(b"f1"), |
|
347 | 346 | HgPathBuf::from_bytes(b"copyname"), |
|
348 | 347 | ); |
|
349 | 348 | copymap.insert( |
|
350 | 349 | HgPathBuf::from_bytes(b"f4\xF6"), |
|
351 | 350 | HgPathBuf::from_bytes(b"copyname2"), |
|
352 | 351 | ); |
|
353 | 352 | let parents = DirstateParents { |
|
354 | 353 | p1: *b"12345678910111213141", |
|
355 | 354 | p2: *b"00000000000000000000", |
|
356 | 355 | }; |
|
357 | 356 | let now = Duration::new(15000000, 0); |
|
358 | 357 | let result = |
|
359 | 358 | pack_dirstate(&mut state_map, ©map, parents.clone(), now) |
|
360 | 359 | .unwrap(); |
|
361 | 360 | |
|
362 | let mut new_state_map: StateMap = HashMap:: |

363 | let mut new_copy_map: CopyMap = HashMap:: |

361 | let mut new_state_map: StateMap = FastHashMap::default(); | |

362 | let mut new_copy_map: CopyMap = FastHashMap::default(); | |
|
364 | 363 | let new_parents = parse_dirstate( |
|
365 | 364 | &mut new_state_map, |
|
366 | 365 | &mut new_copy_map, |
|
367 | 366 | result.as_slice(), |
|
368 | 367 | ) |
|
369 | 368 | .unwrap(); |
|
370 | 369 | assert_eq!( |
|
371 | 370 | (parents, state_map, copymap), |
|
372 | 371 | (new_parents, new_state_map, new_copy_map) |
|
373 | 372 | ) |
|
374 | 373 | } |
|
375 | 374 | |
|
376 | 375 | #[test] |
|
377 | 376 | /// https://www.mercurial-scm.org/repo/hg/rev/af3f26b6bba4 |
|
378 | 377 | fn test_parse_pack_one_entry_with_copy_and_time_conflict() { |
|
379 | 378 | let mut state_map: StateMap = [( |
|
380 | 379 | HgPathBuf::from_bytes(b"f1"), |
|
381 | 380 | DirstateEntry { |
|
382 | 381 | state: EntryState::Normal, |
|
383 | 382 | mode: 0o644, |
|
384 | 383 | size: 0, |
|
385 | 384 | mtime: 15000000, |
|
386 | 385 | }, |
|
387 | 386 | )] |
|
388 | 387 | .iter() |
|
389 | 388 | .cloned() |
|
390 | 389 | .collect(); |
|
391 | let mut copymap = HashMap:: |

390 | let mut copymap = FastHashMap::default(); | |
|
392 | 391 | copymap.insert( |
|
393 | 392 | HgPathBuf::from_bytes(b"f1"), |
|
394 | 393 | HgPathBuf::from_bytes(b"copyname"), |
|
395 | 394 | ); |
|
396 | 395 | let parents = DirstateParents { |
|
397 | 396 | p1: *b"12345678910111213141", |
|
398 | 397 | p2: *b"00000000000000000000", |
|
399 | 398 | }; |
|
400 | 399 | let now = Duration::new(15000000, 0); |
|
401 | 400 | let result = |
|
402 | 401 | pack_dirstate(&mut state_map, ©map, parents.clone(), now) |
|
403 | 402 | .unwrap(); |
|
404 | 403 | |
|
405 | let mut new_state_map: StateMap = HashMap:: |

406 | let mut new_copy_map: CopyMap = HashMap:: |

404 | let mut new_state_map: StateMap = FastHashMap::default(); | |

405 | let mut new_copy_map: CopyMap = FastHashMap::default(); | |
|
407 | 406 | let new_parents = parse_dirstate( |
|
408 | 407 | &mut new_state_map, |
|
409 | 408 | &mut new_copy_map, |
|
410 | 409 | result.as_slice(), |
|
411 | 410 | ) |
|
412 | 411 | .unwrap(); |
|
413 | 412 | |
|
414 | 413 | assert_eq!( |
|
415 | 414 | ( |
|
416 | 415 | parents, |
|
417 | 416 | [( |
|
418 | 417 | HgPathBuf::from_bytes(b"f1"), |
|
419 | 418 | DirstateEntry { |
|
420 | 419 | state: EntryState::Normal, |
|
421 | 420 | mode: 0o644, |
|
422 | 421 | size: 0, |
|
423 | 422 | mtime: -1 |
|
424 | 423 | } |
|
425 | 424 | )] |
|
426 | 425 | .iter() |
|
427 | 426 | .cloned() |
|
428 | 427 | .collect::<StateMap>(), |
|
429 | 428 | copymap, |
|
430 | 429 | ), |
|
431 | 430 | (new_parents, new_state_map, new_copy_map) |
|
432 | 431 | ) |
|
433 | 432 | } |
|
434 | 433 | } |
@@ -1,694 +1,694 | |||
|
1 | 1 | // discovery.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Georges Racinet <georges.racinet@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | //! Discovery operations |
|
9 | 9 | //! |
|
10 | 10 | //! This is a Rust counterpart to the `partialdiscovery` class of |
|
11 | 11 | //! `mercurial.setdiscovery` |
|
12 | 12 | |
|
13 | 13 | use super::{Graph, GraphError, Revision, NULL_REVISION}; |
|
14 | use crate::ancestors::MissingAncestors; | |
|
15 | use crate::dagops; | |
|
14 | use crate::{ancestors::MissingAncestors, dagops, FastHashMap}; | |
|
16 | 15 | use rand::seq::SliceRandom; |
|
17 | 16 | use rand::{thread_rng, RngCore, SeedableRng}; |
|
18 | 17 | use std::cmp::{max, min}; |
|
19 | use std::collections::{ |

18 | use std::collections::{HashSet, VecDeque}; |
|
20 | 19 | |
|
21 | 20 | type Rng = rand_pcg::Pcg32; |
|
22 | 21 | |
|
23 | 22 | pub struct PartialDiscovery<G: Graph + Clone> { |
|
24 | 23 | target_heads: Option<Vec<Revision>>, |
|
25 | 24 | graph: G, // plays the role of self._repo |
|
26 | 25 | common: MissingAncestors<G>, |
|
27 | 26 | undecided: Option<HashSet<Revision>>, |
|
28 | children_cache: Option<HashMap<Revision, Vec<Revision>>>, | |
|
27 | children_cache: Option<FastHashMap<Revision, Vec<Revision>>>, | |
|
29 | 28 | missing: HashSet<Revision>, |
|
30 | 29 | rng: Rng, |
|
31 | 30 | respect_size: bool, |
|
32 | 31 | randomize: bool, |
|
33 | 32 | } |
|
34 | 33 | |
|
35 | 34 | pub struct DiscoveryStats { |
|
36 | 35 | pub undecided: Option<usize>, |
|
37 | 36 | } |
|
38 | 37 | |
|
39 | 38 | /// Update an existing sample to match the expected size |
|
40 | 39 | /// |
|
41 | 40 | /// The sample is updated with revisions exponentially distant from each |
|
42 | 41 | /// element of `heads`. |
|
43 | 42 | /// |
|
44 | 43 | /// If a target size is specified, the sampling will stop once this size is |
|
45 | 44 | /// reached. Otherwise sampling will happen until roots of the <revs> set are |
|
46 | 45 | /// reached. |
|
47 | 46 | /// |
|
48 | 47 | /// - `revs`: set of revs we want to discover (if None, assume the whole dag |

49 | 48 | /// represented by `parentsfn`) |
|
50 | 49 | /// - `heads`: set of DAG head revs |
|
51 | 50 | /// - `sample`: a sample to update |
|
52 | 51 | /// - `parentfn`: a callable to resolve parents for a revision |
|
53 | 52 | /// - `quicksamplesize`: optional target size of the sample |
|
54 | 53 | fn update_sample<I>( |
|
55 | 54 | revs: Option<&HashSet<Revision>>, |
|
56 | 55 | heads: impl IntoIterator<Item = Revision>, |
|
57 | 56 | sample: &mut HashSet<Revision>, |
|
58 | 57 | parentsfn: impl Fn(Revision) -> Result<I, GraphError>, |
|
59 | 58 | quicksamplesize: Option<usize>, |
|
60 | 59 | ) -> Result<(), GraphError> |
|
61 | 60 | where |
|
62 | 61 | I: Iterator<Item = Revision>, |
|
63 | 62 | { |
|
64 | let mut distances: HashMap<Revision, u32> = HashMap:: |

63 | let mut distances: FastHashMap<Revision, u32> = FastHashMap::default(); | |
|
65 | 64 | let mut visit: VecDeque<Revision> = heads.into_iter().collect(); |
|
66 | 65 | let mut factor: u32 = 1; |
|
67 | 66 | let mut seen: HashSet<Revision> = HashSet::new(); |
|
68 | 67 | while let Some(current) = visit.pop_front() { |
|
69 | 68 | if !seen.insert(current) { |
|
70 | 69 | continue; |
|
71 | 70 | } |
|
72 | 71 | |
|
73 | 72 | let d = *distances.entry(current).or_insert(1); |
|
74 | 73 | if d > factor { |
|
75 | 74 | factor *= 2; |
|
76 | 75 | } |
|
77 | 76 | if d == factor { |
|
78 | 77 | sample.insert(current); |
|
79 | 78 | if let Some(sz) = quicksamplesize { |
|
80 | 79 | if sample.len() >= sz { |
|
81 | 80 | return Ok(()); |
|
82 | 81 | } |
|
83 | 82 | } |
|
84 | 83 | } |
|
85 | 84 | for p in parentsfn(current)? { |
|
86 | 85 | if let Some(revs) = revs { |
|
87 | 86 | if !revs.contains(&p) { |
|
88 | 87 | continue; |
|
89 | 88 | } |
|
90 | 89 | } |
|
91 | 90 | distances.entry(p).or_insert(d + 1); |
|
92 | 91 | visit.push_back(p); |
|
93 | 92 | } |
|
94 | 93 | } |
|
95 | 94 | Ok(()) |
|
96 | 95 | } |
|
97 | 96 | |
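To make the doc comment above concrete, here is a toy run of `update_sample` on a linear chain 0 ← 1 ← … ← 15 (a hypothetical graph; `Revision` is the crate's integer revision type). Revisions at distance 1, 2, 4, 8 and 16 from the head end up in the sample:

    #[test]
    fn update_sample_linear_chain_sketch() -> Result<(), GraphError> {
        let mut sample: HashSet<Revision> = HashSet::new();
        update_sample(
            None,
            vec![15],
            &mut sample,
            // On a linear chain, the only parent of `r` is `r - 1`.
            |r| Ok(if r > 0 { vec![r - 1] } else { vec![] }.into_iter()),
            None,
        )?;
        let expected: HashSet<Revision> =
            [15, 14, 12, 8, 0].iter().cloned().collect();
        assert_eq!(sample, expected);
        Ok(())
    }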
|
98 | 97 | struct ParentsIterator { |
|
99 | 98 | parents: [Revision; 2], |
|
100 | 99 | cur: usize, |
|
101 | 100 | } |
|
102 | 101 | |
|
103 | 102 | impl ParentsIterator { |
|
104 | 103 | fn graph_parents( |
|
105 | 104 | graph: &impl Graph, |
|
106 | 105 | r: Revision, |
|
107 | 106 | ) -> Result<ParentsIterator, GraphError> { |
|
108 | 107 | Ok(ParentsIterator { |
|
109 | 108 | parents: graph.parents(r)?, |
|
110 | 109 | cur: 0, |
|
111 | 110 | }) |
|
112 | 111 | } |
|
113 | 112 | } |
|
114 | 113 | |
|
115 | 114 | impl Iterator for ParentsIterator { |
|
116 | 115 | type Item = Revision; |
|
117 | 116 | |
|
118 | 117 | fn next(&mut self) -> Option<Revision> { |
|
119 | 118 | if self.cur > 1 { |
|
120 | 119 | return None; |
|
121 | 120 | } |
|
122 | 121 | let rev = self.parents[self.cur]; |
|
123 | 122 | self.cur += 1; |
|
124 | 123 | if rev == NULL_REVISION { |
|
125 | 124 | return self.next(); |
|
126 | 125 | } |
|
127 | 126 | Some(rev) |
|
128 | 127 | } |
|
129 | 128 | } |
|
130 | 129 | |
|
131 | 130 | impl<G: Graph + Clone> PartialDiscovery<G> { |
|
132 | 131 | /// Create a PartialDiscovery object, with the intent |
|
133 | 132 | /// of comparing our `::<target_heads>` revset to the contents of another |
|
134 | 133 | /// repo. |
|
135 | 134 | /// |
|
136 | 135 | /// For now `target_heads` is passed as a vector, and will be used |
|
137 | 136 | /// at the first call to `ensure_undecided()`. |
|
138 | 137 | /// |
|
139 | 138 | /// If we want to make the signature more flexible, |
|
140 | 139 | /// we'll have to make it a type argument of `PartialDiscovery` or a trait |
|
141 | 140 | /// object since we'll keep it in the meanwhile |
|
142 | 141 | /// |
|
143 | 142 | /// The `respect_size` boolean controls how the sampling methods |
|
144 | 143 | /// will interpret the size argument requested by the caller. If it's |
|
145 | 144 | /// `false`, they are allowed to produce a sample whose size is more |
|
146 | 145 | /// appropriate to the situation (typically bigger). |
|
147 | 146 | /// |
|
148 | 147 | /// The `randomize` boolean affects sampling, and specifically how |
|
149 | 148 | /// limiting or last-minute expanding is done: |
|
150 | 149 | /// |
|
151 | 150 | /// If `true`, both will perform random picking from `self.undecided`. |
|
152 | 151 | /// This is currently the best for actual discoveries. |
|
153 | 152 | /// |
|
154 | 153 | /// If `false`, a reproducible picking strategy is performed. This is |
|
155 | 154 | /// useful for integration tests. |
|
156 | 155 | pub fn new( |
|
157 | 156 | graph: G, |
|
158 | 157 | target_heads: Vec<Revision>, |
|
159 | 158 | respect_size: bool, |
|
160 | 159 | randomize: bool, |
|
161 | 160 | ) -> Self { |
|
162 | 161 | let mut seed: [u8; 16] = [0; 16]; |
|
163 | 162 | if randomize { |
|
164 | 163 | thread_rng().fill_bytes(&mut seed); |
|
165 | 164 | } |
|
166 | 165 | Self::new_with_seed(graph, target_heads, seed, respect_size, randomize) |
|
167 | 166 | } |
|
168 | 167 | |
|
169 | 168 | pub fn new_with_seed( |
|
170 | 169 | graph: G, |
|
171 | 170 | target_heads: Vec<Revision>, |
|
172 | 171 | seed: [u8; 16], |
|
173 | 172 | respect_size: bool, |
|
174 | 173 | randomize: bool, |
|
175 | 174 | ) -> Self { |
|
176 | 175 | PartialDiscovery { |
|
177 | 176 | undecided: None, |
|
178 | 177 | children_cache: None, |
|
179 | 178 | target_heads: Some(target_heads), |
|
180 | 179 | graph: graph.clone(), |
|
181 | 180 | common: MissingAncestors::new(graph, vec![]), |
|
182 | 181 | missing: HashSet::new(), |
|
183 | 182 | rng: Rng::from_seed(seed), |
|
184 | 183 | respect_size: respect_size, |
|
185 | 184 | randomize: randomize, |
|
186 | 185 | } |
|
187 | 186 | } |
|
188 | 187 | |
|
189 | 188 | /// Extract at most `size` random elements from sample and return them |
|
190 | 189 | /// as a vector |
|
191 | 190 | fn limit_sample( |
|
192 | 191 | &mut self, |
|
193 | 192 | mut sample: Vec<Revision>, |
|
194 | 193 | size: usize, |
|
195 | 194 | ) -> Vec<Revision> { |
|
196 | 195 | if !self.randomize { |
|
197 | 196 | sample.sort(); |
|
198 | 197 | sample.truncate(size); |
|
199 | 198 | return sample; |
|
200 | 199 | } |
|
201 | 200 | let sample_len = sample.len(); |
|
202 | 201 | if sample_len <= size { |
|
203 | 202 | return sample; |
|
204 | 203 | } |
|
205 | 204 | let rng = &mut self.rng; |
|
206 | 205 | let dropped_size = sample_len - size; |
|
207 | 206 | let limited_slice = if size < dropped_size { |
|
208 | 207 | sample.partial_shuffle(rng, size).0 |
|
209 | 208 | } else { |
|
210 | 209 | sample.partial_shuffle(rng, dropped_size).1 |
|
211 | 210 | }; |
|
212 | 211 | limited_slice.to_owned() |
|
213 | 212 | } |
|
214 | 213 | |
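The two branches above are an optimisation around `SliceRandom::partial_shuffle(rng, n)`, which shuffles only `n` elements to the front and returns `(shuffled_prefix, rest)`: shuffling `min(size, dropped_size)` elements is enough, keeping either the prefix or the rest. A minimal sketch of that primitive in isolation (hypothetical helper):

    use rand::seq::SliceRandom;
    use rand::SeedableRng;

    fn pick_three() -> Vec<i32> {
        let mut rng = rand_pcg::Pcg32::from_seed([0; 16]);
        let mut pool: Vec<i32> = (0..10).collect();
        // Only three elements are moved; the returned prefix is the sample.
        let (picked, _rest) = pool.partial_shuffle(&mut rng, 3);
        picked.to_vec()
    }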
|
215 | 214 | /// Register revisions known as being common |
|
216 | 215 | pub fn add_common_revisions( |
|
217 | 216 | &mut self, |
|
218 | 217 | common: impl IntoIterator<Item = Revision>, |
|
219 | 218 | ) -> Result<(), GraphError> { |
|
220 | 219 | let before_len = self.common.get_bases().len(); |
|
221 | 220 | self.common.add_bases(common); |
|
222 | 221 | if self.common.get_bases().len() == before_len { |
|
223 | 222 | return Ok(()); |
|
224 | 223 | } |
|
225 | 224 | if let Some(ref mut undecided) = self.undecided { |
|
226 | 225 | self.common.remove_ancestors_from(undecided)?; |
|
227 | 226 | } |
|
228 | 227 | Ok(()) |
|
229 | 228 | } |
|
230 | 229 | |
|
231 | 230 | /// Register revisions known as being missing |
|
232 | 231 | /// |
|
233 | 232 | /// # Performance note |
|
234 | 233 | /// |
|
235 | 234 | /// Except in the most trivial case, the first call of this method has |
|
236 | 235 | /// the side effect of computing `self.undecided` set for the first time, |
|
237 | 236 | /// and the related caches it might need for efficiency of its internal |
|
238 | 237 | /// computation. This is typically faster if more information is |
|
239 | 238 | /// available in `self.common`. Therefore, for good performance, the |
|
240 | 239 | /// caller should avoid calling this too early. |
|
241 | 240 | pub fn add_missing_revisions( |
|
242 | 241 | &mut self, |
|
243 | 242 | missing: impl IntoIterator<Item = Revision>, |
|
244 | 243 | ) -> Result<(), GraphError> { |
|
245 | 244 | let mut tovisit: VecDeque<Revision> = missing.into_iter().collect(); |
|
246 | 245 | if tovisit.is_empty() { |
|
247 | 246 | return Ok(()); |
|
248 | 247 | } |
|
249 | 248 | self.ensure_children_cache()?; |
|
250 | 249 | self.ensure_undecided()?; // for safety of possible future refactors |
|
251 | 250 | let children = self.children_cache.as_ref().unwrap(); |
|
252 | 251 | let mut seen: HashSet<Revision> = HashSet::new(); |
|
253 | 252 | let undecided_mut = self.undecided.as_mut().unwrap(); |
|
254 | 253 | while let Some(rev) = tovisit.pop_front() { |
|
255 | 254 | if !self.missing.insert(rev) { |
|
256 | 255 | // either it's known to be missing from a previous |
|
257 | 256 | // invocation, and there's no need to iterate on its |
|
258 | 257 | // children (we know they are all missing) |
|
259 | 258 | // or it's from a previous iteration of this loop |
|
260 | 259 | // and its children have already been queued |
|
261 | 260 | continue; |
|
262 | 261 | } |
|
263 | 262 | undecided_mut.remove(&rev); |
|
264 | 263 | match children.get(&rev) { |
|
265 | 264 | None => { |
|
266 | 265 | continue; |
|
267 | 266 | } |
|
268 | 267 | Some(this_children) => { |
|
269 | 268 | for child in this_children.iter().cloned() { |
|
270 | 269 | if seen.insert(child) { |
|
271 | 270 | tovisit.push_back(child); |
|
272 | 271 | } |
|
273 | 272 | } |
|
274 | 273 | } |
|
275 | 274 | } |
|
276 | 275 | } |
|
277 | 276 | Ok(()) |
|
278 | 277 | } |
|
279 | 278 | |
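A sketch of the call order suggested by the performance note above, reusing the crate's `SampleGraph` testing graph (test-only context assumed; the revision numbers mirror the tests further down):

    #[test]
    fn discovery_call_order_sketch() -> Result<(), GraphError> {
        use crate::testing::SampleGraph;

        let mut disco =
            PartialDiscovery::new(SampleGraph, vec![10, 11, 12, 13], true, true);
        disco.add_common_revisions(vec![11, 12])?; // cheap: `undecided` not built
        disco.add_missing_revisions(vec![8, 10])?; // builds `undecided` and caches
        assert!(disco.has_info());
        Ok(())
    }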
|
280 | 279 | /// Do we have any information about the peer? |
|
281 | 280 | pub fn has_info(&self) -> bool { |
|
282 | 281 | self.common.has_bases() |
|
283 | 282 | } |
|
284 | 283 | |
|
285 | 284 | /// Did we acquire full knowledge of our Revisions that the peer has? |
|
286 | 285 | pub fn is_complete(&self) -> bool { |
|
287 | 286 | self.undecided.as_ref().map_or(false, |s| s.is_empty()) |
|
288 | 287 | } |
|
289 | 288 | |
|
290 | 289 | /// Return the heads of the currently known common set of revisions. |
|
291 | 290 | /// |
|
292 | 291 | /// If the discovery process is not complete (see `is_complete()`), the |
|
293 | 292 | /// caller must be aware that this is an intermediate state. |
|
294 | 293 | /// |
|
295 | 294 | /// On the other hand, if it is complete, then this is currently |
|
296 | 295 | /// the only way to retrieve the end results of the discovery process. |
|
297 | 296 | /// |
|
298 | 297 | /// We may introduce in the future an `into_common_heads` call that |
|
299 | 298 | /// would be more appropriate for normal Rust callers, dropping `self` |
|
300 | 299 | /// if it is complete. |
|
301 | 300 | pub fn common_heads(&self) -> Result<HashSet<Revision>, GraphError> { |
|
302 | 301 | self.common.bases_heads() |
|
303 | 302 | } |
|
304 | 303 | |
|
305 | 304 | /// Force first computation of `self.undecided` |
|
306 | 305 | /// |
|
307 | 306 | /// After this, `self.undecided.as_ref()` and `.as_mut()` can be |
|
308 | 307 | /// unwrapped to get workable immutable or mutable references without |
|
309 | 308 | /// any panic. |
|
310 | 309 | /// |
|
311 | 310 | /// This is an imperative call instead of an access with added laziness |

312 | 311 | /// to easily reduce the scope of the mutable borrow for the caller, |
|
313 | 312 | /// compared to undecided(&'a mut self) -> &'a… that would keep it |
|
314 | 313 | /// as long as the resulting immutable one. |
|
315 | 314 | fn ensure_undecided(&mut self) -> Result<(), GraphError> { |
|
316 | 315 | if self.undecided.is_some() { |
|
317 | 316 | return Ok(()); |
|
318 | 317 | } |
|
319 | 318 | let tgt = self.target_heads.take().unwrap(); |
|
320 | 319 | self.undecided = |
|
321 | 320 | Some(self.common.missing_ancestors(tgt)?.into_iter().collect()); |
|
322 | 321 | Ok(()) |
|
323 | 322 | } |
|
324 | 323 | |
|
325 | 324 | fn ensure_children_cache(&mut self) -> Result<(), GraphError> { |
|
326 | 325 | if self.children_cache.is_some() { |
|
327 | 326 | return Ok(()); |
|
328 | 327 | } |
|
329 | 328 | self.ensure_undecided()?; |
|
330 | 329 | |
|
331 | let mut children: HashMap<Revision, Vec<Revision>> = |

330 | let mut children: FastHashMap<Revision, Vec<Revision>> = | |

331 | FastHashMap::default(); | |
|
332 | 332 | for &rev in self.undecided.as_ref().unwrap() { |
|
333 | 333 | for p in ParentsIterator::graph_parents(&self.graph, rev)? { |
|
334 | 334 | children.entry(p).or_insert_with(|| Vec::new()).push(rev); |
|
335 | 335 | } |
|
336 | 336 | } |
|
337 | 337 | self.children_cache = Some(children); |
|
338 | 338 | Ok(()) |
|
339 | 339 | } |
|
340 | 340 | |
|
341 | 341 | /// Provide statistics about the current state of the discovery process |
|
342 | 342 | pub fn stats(&self) -> DiscoveryStats { |
|
343 | 343 | DiscoveryStats { |
|
344 | 344 | undecided: self.undecided.as_ref().map(|s| s.len()), |
|
345 | 345 | } |
|
346 | 346 | } |
|
347 | 347 | |
|
348 | 348 | pub fn take_quick_sample( |
|
349 | 349 | &mut self, |
|
350 | 350 | headrevs: impl IntoIterator<Item = Revision>, |
|
351 | 351 | size: usize, |
|
352 | 352 | ) -> Result<Vec<Revision>, GraphError> { |
|
353 | 353 | self.ensure_undecided()?; |
|
354 | 354 | let mut sample = { |
|
355 | 355 | let undecided = self.undecided.as_ref().unwrap(); |
|
356 | 356 | if undecided.len() <= size { |
|
357 | 357 | return Ok(undecided.iter().cloned().collect()); |
|
358 | 358 | } |
|
359 | 359 | dagops::heads(&self.graph, undecided.iter())? |
|
360 | 360 | }; |
|
361 | 361 | if sample.len() >= size { |
|
362 | 362 | return Ok(self.limit_sample(sample.into_iter().collect(), size)); |
|
363 | 363 | } |
|
364 | 364 | update_sample( |
|
365 | 365 | None, |
|
366 | 366 | headrevs, |
|
367 | 367 | &mut sample, |
|
368 | 368 | |r| ParentsIterator::graph_parents(&self.graph, r), |
|
369 | 369 | Some(size), |
|
370 | 370 | )?; |
|
371 | 371 | Ok(sample.into_iter().collect()) |
|
372 | 372 | } |
|
373 | 373 | |
|
374 | 374 | /// Extract a sample from `self.undecided`, going from its heads and roots. |
|
375 | 375 | /// |
|
376 | 376 | /// The `size` parameter is used to avoid useless computations if |
|
377 | 377 | /// it turns out to be bigger than the whole set of undecided Revisions. |
|
378 | 378 | /// |
|
379 | 379 | /// The sample is taken by using `update_sample` from the heads, then |
|
380 | 380 | /// from the roots, working on the reverse DAG, |
|
381 | 381 | /// expressed by `self.children_cache`. |
|
382 | 382 | /// |
|
383 | 383 | /// No effort is being made to complete or limit the sample to `size` |
|
384 | 384 | /// but this method returns another interesting size that it derives |
|
385 | 385 | /// from its knowledge of the structure of the various sets, leaving |
|
386 | 386 | /// to the caller the decision to use it or not. |
|
387 | 387 | fn bidirectional_sample( |
|
388 | 388 | &mut self, |
|
389 | 389 | size: usize, |
|
390 | 390 | ) -> Result<(HashSet<Revision>, usize), GraphError> { |
|
391 | 391 | self.ensure_undecided()?; |
|
392 | 392 | { |
|
393 | 393 | // we don't want to compute children_cache before this |
|
394 | 394 | // but doing it after extracting self.undecided takes a mutable |
|
395 | 395 | // ref to self while a shareable one is still active. |
|
396 | 396 | let undecided = self.undecided.as_ref().unwrap(); |
|
397 | 397 | if undecided.len() <= size { |
|
398 | 398 | return Ok((undecided.clone(), size)); |
|
399 | 399 | } |
|
400 | 400 | } |
|
401 | 401 | |
|
402 | 402 | self.ensure_children_cache()?; |
|
403 | 403 | let revs = self.undecided.as_ref().unwrap(); |
|
404 | 404 | let mut sample: HashSet<Revision> = revs.clone(); |
|
405 | 405 | |
|
406 | 406 | // it's possible that leveraging the children cache would be more |
|
407 | 407 | // efficient here |
|
408 | 408 | dagops::retain_heads(&self.graph, &mut sample)?; |
|
409 | 409 | let revsheads = sample.clone(); // was again heads(revs) in python |
|
410 | 410 | |
|
411 | 411 | // update from heads |
|
412 | 412 | update_sample( |
|
413 | 413 | Some(revs), |
|
414 | 414 | revsheads.iter().cloned(), |
|
415 | 415 | &mut sample, |
|
416 | 416 | |r| ParentsIterator::graph_parents(&self.graph, r), |
|
417 | 417 | None, |
|
418 | 418 | )?; |
|
419 | 419 | |
|
420 | 420 | // update from roots |
|
421 | 421 | let revroots: HashSet<Revision> = |
|
422 | 422 | dagops::roots(&self.graph, revs)?.into_iter().collect(); |
|
423 | 423 | let prescribed_size = max(size, min(revroots.len(), revsheads.len())); |
|
424 | 424 | |
|
425 | 425 | let children = self.children_cache.as_ref().unwrap(); |
|
426 | 426 | let empty_vec: Vec<Revision> = Vec::new(); |
|
427 | 427 | update_sample( |
|
428 | 428 | Some(revs), |
|
429 | 429 | revroots, |
|
430 | 430 | &mut sample, |
|
431 | 431 | |r| Ok(children.get(&r).unwrap_or(&empty_vec).iter().cloned()), |
|
432 | 432 | None, |
|
433 | 433 | )?; |
|
434 | 434 | Ok((sample, prescribed_size)) |
|
435 | 435 | } |
|
436 | 436 | |
|
437 | 437 | /// Fill up sample up to the wished size with random undecided Revisions. |
|
438 | 438 | /// |
|
439 | 439 | /// This is intended to be used as a last resort completion if the |
|
440 | 440 | /// regular sampling algorithm returns too few elements. |
|
441 | 441 | fn random_complete_sample( |
|
442 | 442 | &mut self, |
|
443 | 443 | sample: &mut Vec<Revision>, |
|
444 | 444 | size: usize, |
|
445 | 445 | ) { |
|
446 | 446 | let sample_len = sample.len(); |
|
447 | 447 | if size <= sample_len { |
|
448 | 448 | return; |
|
449 | 449 | } |
|
450 | 450 | let take_from: Vec<Revision> = self |
|
451 | 451 | .undecided |
|
452 | 452 | .as_ref() |
|
453 | 453 | .unwrap() |
|
454 | 454 | .iter() |
|
455 | 455 | .filter(|&r| !sample.contains(r)) |
|
456 | 456 | .cloned() |
|
457 | 457 | .collect(); |
|
458 | 458 | sample.extend(self.limit_sample(take_from, size - sample_len)); |
|
459 | 459 | } |
|
460 | 460 | |
|
461 | 461 | pub fn take_full_sample( |
|
462 | 462 | &mut self, |
|
463 | 463 | size: usize, |
|
464 | 464 | ) -> Result<Vec<Revision>, GraphError> { |
|
465 | 465 | let (sample_set, prescribed_size) = self.bidirectional_sample(size)?; |
|
466 | 466 | let size = if self.respect_size { |
|
467 | 467 | size |
|
468 | 468 | } else { |
|
469 | 469 | prescribed_size |
|
470 | 470 | }; |
|
471 | 471 | let mut sample = |
|
472 | 472 | self.limit_sample(sample_set.into_iter().collect(), size); |
|
473 | 473 | self.random_complete_sample(&mut sample, size); |
|
474 | 474 | Ok(sample) |
|
475 | 475 | } |
|
476 | 476 | } |
|
477 | 477 | |
|
478 | 478 | #[cfg(test)] |
|
479 | 479 | mod tests { |
|
480 | 480 | use super::*; |
|
481 | 481 | use crate::testing::SampleGraph; |
|
482 | 482 | |
|
483 | 483 | /// A PartialDiscovery as for pushing all the heads of `SampleGraph` |
|
484 | 484 | /// |
|
485 | 485 | /// To avoid actual randomness in these tests, we give it a fixed |
|
486 | 486 | /// random seed, but by default we'll test the random version. |
|
487 | 487 | fn full_disco() -> PartialDiscovery<SampleGraph> { |
|
488 | 488 | PartialDiscovery::new_with_seed( |
|
489 | 489 | SampleGraph, |
|
490 | 490 | vec![10, 11, 12, 13], |
|
491 | 491 | [0; 16], |
|
492 | 492 | true, |
|
493 | 493 | true, |
|
494 | 494 | ) |
|
495 | 495 | } |
|
496 | 496 | |
|
497 | 497 | /// A PartialDiscovery as for pushing the 12 head of `SampleGraph` |
|
498 | 498 | /// |
|
499 | 499 | /// To avoid actual randomness in tests, we give it a fixed random seed. |
|
500 | 500 | fn disco12() -> PartialDiscovery<SampleGraph> { |
|
501 | 501 | PartialDiscovery::new_with_seed( |
|
502 | 502 | SampleGraph, |
|
503 | 503 | vec![12], |
|
504 | 504 | [0; 16], |
|
505 | 505 | true, |
|
506 | 506 | true, |
|
507 | 507 | ) |
|
508 | 508 | } |
|
509 | 509 | |
|
510 | 510 | fn sorted_undecided( |
|
511 | 511 | disco: &PartialDiscovery<SampleGraph>, |
|
512 | 512 | ) -> Vec<Revision> { |
|
513 | 513 | let mut as_vec: Vec<Revision> = |
|
514 | 514 | disco.undecided.as_ref().unwrap().iter().cloned().collect(); |
|
515 | 515 | as_vec.sort(); |
|
516 | 516 | as_vec |
|
517 | 517 | } |
|
518 | 518 | |
|
519 | 519 | fn sorted_missing(disco: &PartialDiscovery<SampleGraph>) -> Vec<Revision> { |
|
520 | 520 | let mut as_vec: Vec<Revision> = |
|
521 | 521 | disco.missing.iter().cloned().collect(); |
|
522 | 522 | as_vec.sort(); |
|
523 | 523 | as_vec |
|
524 | 524 | } |
|
525 | 525 | |
|
526 | 526 | fn sorted_common_heads( |
|
527 | 527 | disco: &PartialDiscovery<SampleGraph>, |
|
528 | 528 | ) -> Result<Vec<Revision>, GraphError> { |
|
529 | 529 | let mut as_vec: Vec<Revision> = |
|
530 | 530 | disco.common_heads()?.iter().cloned().collect(); |
|
531 | 531 | as_vec.sort(); |
|
532 | 532 | Ok(as_vec) |
|
533 | 533 | } |
|
534 | 534 | |
|
535 | 535 | #[test] |
|
536 | 536 | fn test_add_common_get_undecided() -> Result<(), GraphError> { |
|
537 | 537 | let mut disco = full_disco(); |
|
538 | 538 | assert_eq!(disco.undecided, None); |
|
539 | 539 | assert!(!disco.has_info()); |
|
540 | 540 | assert_eq!(disco.stats().undecided, None); |
|
541 | 541 | |
|
542 | 542 | disco.add_common_revisions(vec![11, 12])?; |
|
543 | 543 | assert!(disco.has_info()); |
|
544 | 544 | assert!(!disco.is_complete()); |
|
545 | 545 | assert!(disco.missing.is_empty()); |
|
546 | 546 | |
|
547 | 547 | // add_common_revisions did not trigger a premature computation |
|
548 | 548 | // of `undecided`, let's check that and ask for them |
|
549 | 549 | assert_eq!(disco.undecided, None); |
|
550 | 550 | disco.ensure_undecided()?; |
|
551 | 551 | assert_eq!(sorted_undecided(&disco), vec![5, 8, 10, 13]); |
|
552 | 552 | assert_eq!(disco.stats().undecided, Some(4)); |
|
553 | 553 | Ok(()) |
|
554 | 554 | } |
|
555 | 555 | |
|
556 | 556 | /// in this test, we pretend that our peer misses exactly (8+10):: |
|
557 | 557 | /// and we're comparing all our repo to it (as in a bare push) |
|
558 | 558 | #[test] |
|
559 | 559 | fn test_discovery() -> Result<(), GraphError> { |
|
560 | 560 | let mut disco = full_disco(); |
|
561 | 561 | disco.add_common_revisions(vec![11, 12])?; |
|
562 | 562 | disco.add_missing_revisions(vec![8, 10])?; |
|
563 | 563 | assert_eq!(sorted_undecided(&disco), vec![5]); |
|
564 | 564 | assert_eq!(sorted_missing(&disco), vec![8, 10, 13]); |
|
565 | 565 | assert!(!disco.is_complete()); |
|
566 | 566 | |
|
567 | 567 | disco.add_common_revisions(vec![5])?; |
|
568 | 568 | assert_eq!(sorted_undecided(&disco), vec![]); |
|
569 | 569 | assert_eq!(sorted_missing(&disco), vec![8, 10, 13]); |
|
570 | 570 | assert!(disco.is_complete()); |
|
571 | 571 | assert_eq!(sorted_common_heads(&disco)?, vec![5, 11, 12]); |
|
572 | 572 | Ok(()) |
|
573 | 573 | } |
|
574 | 574 | |
|
575 | 575 | #[test] |
|
576 | 576 | fn test_add_missing_early_continue() -> Result<(), GraphError> { |
|
577 | 577 | eprintln!("test_add_missing_early_stop"); |
|
578 | 578 | let mut disco = full_disco(); |
|
579 | 579 | disco.add_common_revisions(vec![13, 3, 4])?; |
|
580 | 580 | disco.ensure_children_cache()?; |
|
581 | 581 | // 12 is grand-child of 6 through 9 |
|
582 | 582 | // passing them in this order maximizes the chances of the |
|
583 | 583 | // early continue to do the wrong thing |
|
584 | 584 | disco.add_missing_revisions(vec![6, 9, 12])?; |
|
585 | 585 | assert_eq!(sorted_undecided(&disco), vec![5, 7, 10, 11]); |
|
586 | 586 | assert_eq!(sorted_missing(&disco), vec![6, 9, 12]); |
|
587 | 587 | assert!(!disco.is_complete()); |
|
588 | 588 | Ok(()) |
|
589 | 589 | } |
|
590 | 590 | |
|
591 | 591 | #[test] |
|
592 | 592 | fn test_limit_sample_no_need_to() { |
|
593 | 593 | let sample = vec![1, 2, 3, 4]; |
|
594 | 594 | assert_eq!(full_disco().limit_sample(sample, 10), vec![1, 2, 3, 4]); |
|
595 | 595 | } |
|
596 | 596 | |
|
597 | 597 | #[test] |
|
598 | 598 | fn test_limit_sample_less_than_half() { |
|
599 | 599 | assert_eq!(full_disco().limit_sample((1..6).collect(), 2), vec![4, 2]); |
|
600 | 600 | } |
|
601 | 601 | |
|
602 | 602 | #[test] |
|
603 | 603 | fn test_limit_sample_more_than_half() { |
|
604 | 604 | assert_eq!(full_disco().limit_sample((1..4).collect(), 2), vec![3, 2]); |
|
605 | 605 | } |
|
606 | 606 | |
|
607 | 607 | #[test] |
|
608 | 608 | fn test_limit_sample_no_random() { |
|
609 | 609 | let mut disco = full_disco(); |
|
610 | 610 | disco.randomize = false; |
|
611 | 611 | assert_eq!( |
|
612 | 612 | disco.limit_sample(vec![1, 8, 13, 5, 7, 3], 4), |
|
613 | 613 | vec![1, 3, 5, 7] |
|
614 | 614 | ); |
|
615 | 615 | } |
|
616 | 616 | |
|
617 | 617 | #[test] |
|
618 | 618 | fn test_quick_sample_enough_undecided_heads() -> Result<(), GraphError> { |
|
619 | 619 | let mut disco = full_disco(); |
|
620 | 620 | disco.undecided = Some((1..=13).collect()); |
|
621 | 621 | |
|
622 | 622 | let mut sample_vec = disco.take_quick_sample(vec![], 4)?; |
|
623 | 623 | sample_vec.sort(); |
|
624 | 624 | assert_eq!(sample_vec, vec![10, 11, 12, 13]); |
|
625 | 625 | Ok(()) |
|
626 | 626 | } |
|
627 | 627 | |
|
628 | 628 | #[test] |
|
629 | 629 | fn test_quick_sample_climbing_from_12() -> Result<(), GraphError> { |
|
630 | 630 | let mut disco = disco12(); |
|
631 | 631 | disco.ensure_undecided()?; |
|
632 | 632 | |
|
633 | 633 | let mut sample_vec = disco.take_quick_sample(vec![12], 4)?; |
|
634 | 634 | sample_vec.sort(); |
|
635 | 635 | // r12's only parent is r9, whose unique grand-parent through the |
|
636 | 636 | // diamond shape is r4. This ends there because the distance from r4 |
|
637 | 637 | // to the root is only 3. |
|
638 | 638 | assert_eq!(sample_vec, vec![4, 9, 12]); |
|
639 | 639 | Ok(()) |
|
640 | 640 | } |
|
641 | 641 | |
|
642 | 642 | #[test] |
|
643 | 643 | fn test_children_cache() -> Result<(), GraphError> { |
|
644 | 644 | let mut disco = full_disco(); |
|
645 | 645 | disco.ensure_children_cache()?; |
|
646 | 646 | |
|
647 | 647 | let cache = disco.children_cache.unwrap(); |
|
648 | 648 | assert_eq!(cache.get(&2).cloned(), Some(vec![4])); |
|
649 | 649 | assert_eq!(cache.get(&10).cloned(), None); |
|
650 | 650 | |
|
651 | 651 | let mut children_4 = cache.get(&4).cloned().unwrap(); |
|
652 | 652 | children_4.sort(); |
|
653 | 653 | assert_eq!(children_4, vec![5, 6, 7]); |
|
654 | 654 | |
|
655 | 655 | let mut children_7 = cache.get(&7).cloned().unwrap(); |
|
656 | 656 | children_7.sort(); |
|
657 | 657 | assert_eq!(children_7, vec![9, 11]); |
|
658 | 658 | |
|
659 | 659 | Ok(()) |
|
660 | 660 | } |
|
661 | 661 | |
|
662 | 662 | #[test] |
|
663 | 663 | fn test_complete_sample() { |
|
664 | 664 | let mut disco = full_disco(); |
|
665 | 665 | let undecided: HashSet<Revision> = |
|
666 | 666 | [4, 7, 9, 2, 3].iter().cloned().collect(); |
|
667 | 667 | disco.undecided = Some(undecided); |
|
668 | 668 | |
|
669 | 669 | let mut sample = vec![0]; |
|
670 | 670 | disco.random_complete_sample(&mut sample, 3); |
|
671 | 671 | assert_eq!(sample.len(), 3); |
|
672 | 672 | |
|
673 | 673 | let mut sample = vec![2, 4, 7]; |
|
674 | 674 | disco.random_complete_sample(&mut sample, 1); |
|
675 | 675 | assert_eq!(sample.len(), 3); |
|
676 | 676 | } |
|
677 | 677 | |
|
678 | 678 | #[test] |
|
679 | 679 | fn test_bidirectional_sample() -> Result<(), GraphError> { |
|
680 | 680 | let mut disco = full_disco(); |
|
681 | 681 | disco.undecided = Some((0..=13).into_iter().collect()); |
|
682 | 682 | |
|
683 | 683 | let (sample_set, size) = disco.bidirectional_sample(7)?; |
|
684 | 684 | assert_eq!(size, 7); |
|
685 | 685 | let mut sample: Vec<Revision> = sample_set.into_iter().collect(); |
|
686 | 686 | sample.sort(); |
|
687 | 687 | // our DAG is a bit too small for the results to be really interesting |
|
688 | 688 | // at least it shows that |
|
689 | 689 | // - we went both ways |
|
690 | 690 | // - we didn't take all Revisions (6 is not in the sample) |
|
691 | 691 | assert_eq!(sample, vec![0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13]); |
|
692 | 692 | Ok(()) |
|
693 | 693 | } |
|
694 | 694 | } |
@@ -1,379 +1,380 | |||
|
1 | 1 | // filepatterns.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | //! Handling of Mercurial-specific patterns. |
|
9 | 9 | |
|
10 | use crate::{utils::SliceExt, LineNumber, PatternError, PatternFileError}; | |
|
10 | use crate::{ | |
|
11 | utils::SliceExt, FastHashMap, LineNumber, PatternError, PatternFileError, | |
|
12 | }; | |
|
11 | 13 | use lazy_static::lazy_static; |
|
12 | 14 | use regex::bytes::{NoExpand, Regex}; |
|
13 | use std::collections::HashMap; | |
|
14 | 15 | use std::fs::File; |
|
15 | 16 | use std::io::Read; |
|
16 | 17 | use std::path::{Path, PathBuf}; |
|
17 | 18 | use std::vec::Vec; |
|
18 | 19 | |
|
19 | 20 | lazy_static! { |
|
20 | 21 | static ref RE_ESCAPE: Vec<Vec<u8>> = { |
|
21 | 22 | let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect(); |
|
22 | 23 | let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c"; |
|
23 | 24 | for byte in to_escape { |
|
24 | 25 | v[*byte as usize].insert(0, b'\\'); |
|
25 | 26 | } |
|
26 | 27 | v |
|
27 | 28 | }; |
|
28 | 29 | } |
|
29 | 30 | |
|
30 | 31 | /// These are matched in order |
|
31 | 32 | const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] = |
|
32 | 33 | &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")]; |
|
33 | 34 | |
|
34 | 35 | #[derive(Debug, Copy, Clone, PartialEq, Eq)] |
|
35 | 36 | pub enum PatternSyntax { |
|
36 | 37 | Regexp, |
|
37 | 38 | /// Glob that matches at the front of the path |
|
38 | 39 | RootGlob, |
|
39 | 40 | /// Glob that matches at any suffix of the path (still anchored at |
|
40 | 41 | /// slashes) |
|
41 | 42 | Glob, |
|
42 | 43 | Path, |
|
43 | 44 | RelPath, |
|
44 | 45 | RelGlob, |
|
45 | 46 | RelRegexp, |
|
46 | 47 | RootFiles, |
|
47 | 48 | } |
|
48 | 49 | |
|
49 | 50 | /// Transforms a glob pattern into a regex |
|
50 | 51 | fn glob_to_re(pat: &[u8]) -> Vec<u8> { |
|
51 | 52 | let mut input = pat; |
|
52 | 53 | let mut res: Vec<u8> = vec![]; |
|
53 | 54 | let mut group_depth = 0; |
|
54 | 55 | |
|
55 | 56 | while let Some((c, rest)) = input.split_first() { |
|
56 | 57 | input = rest; |
|
57 | 58 | |
|
58 | 59 | match c { |
|
59 | 60 | b'*' => { |
|
60 | 61 | for (source, repl) in GLOB_REPLACEMENTS { |
|
61 | 62 | if let Some(rest) = input.drop_prefix(source) { |
|
62 | 63 | input = rest; |
|
63 | 64 | res.extend(*repl); |
|
64 | 65 | break; |
|
65 | 66 | } |
|
66 | 67 | } |
|
67 | 68 | } |
|
68 | 69 | b'?' => res.extend(b"."), |
|
69 | 70 | b'[' => { |
|
70 | 71 | match input.iter().skip(1).position(|b| *b == b']') { |
|
71 | 72 | None => res.extend(b"\\["), |
|
72 | 73 | Some(end) => { |
|
73 | 74 | // Account for the one we skipped |
|
74 | 75 | let end = end + 1; |
|
75 | 76 | |
|
76 | 77 | res.extend(b"["); |
|
77 | 78 | |
|
78 | 79 | for (i, b) in input[..end].iter().enumerate() { |
|
79 | 80 | if *b == b'!' && i == 0 { |
|
80 | 81 | res.extend(b"^") |
|
81 | 82 | } else if *b == b'^' && i == 0 { |
|
82 | 83 | res.extend(b"\\^") |
|
83 | 84 | } else if *b == b'\\' { |
|
84 | 85 | res.extend(b"\\\\") |
|
85 | 86 | } else { |
|
86 | 87 | res.push(*b) |
|
87 | 88 | } |
|
88 | 89 | } |
|
89 | 90 | res.extend(b"]"); |
|
90 | 91 | input = &input[end + 1..]; |
|
91 | 92 | } |
|
92 | 93 | } |
|
93 | 94 | } |
|
94 | 95 | b'{' => { |
|
95 | 96 | group_depth += 1; |
|
96 | 97 | res.extend(b"(?:") |
|
97 | 98 | } |
|
98 | 99 | b'}' if group_depth > 0 => { |
|
99 | 100 | group_depth -= 1; |
|
100 | 101 | res.extend(b")"); |
|
101 | 102 | } |
|
102 | 103 | b',' if group_depth > 0 => res.extend(b"|"), |
|
103 | 104 | b'\\' => { |
|
104 | 105 | let c = { |
|
105 | 106 | if let Some((c, rest)) = input.split_first() { |
|
106 | 107 | input = rest; |
|
107 | 108 | c |
|
108 | 109 | } else { |
|
109 | 110 | c |
|
110 | 111 | } |
|
111 | 112 | }; |
|
112 | 113 | res.extend(&RE_ESCAPE[*c as usize]) |
|
113 | 114 | } |
|
114 | 115 | _ => res.extend(&RE_ESCAPE[*c as usize]), |
|
115 | 116 | } |
|
116 | 117 | } |
|
117 | 118 | res |
|
118 | 119 | } |
|
119 | 120 | |
|
120 | 121 | fn escape_pattern(pattern: &[u8]) -> Vec<u8> { |
|
121 | 122 | pattern |
|
122 | 123 | .iter() |
|
123 | 124 | .flat_map(|c| RE_ESCAPE[*c as usize].clone()) |
|
124 | 125 | .collect() |
|
125 | 126 | } |
|
126 | 127 | |
|
127 | 128 | fn parse_pattern_syntax(kind: &[u8]) -> Result<PatternSyntax, PatternError> { |
|
128 | 129 | match kind { |
|
129 | 130 | b"re" => Ok(PatternSyntax::Regexp), |
|
130 | 131 | b"path" => Ok(PatternSyntax::Path), |
|
131 | 132 | b"relpath" => Ok(PatternSyntax::RelPath), |
|
132 | 133 | b"rootfilesin" => Ok(PatternSyntax::RootFiles), |
|
133 | 134 | b"relglob" => Ok(PatternSyntax::RelGlob), |
|
134 | 135 | b"relre" => Ok(PatternSyntax::RelRegexp), |
|
135 | 136 | b"glob" => Ok(PatternSyntax::Glob), |
|
136 | 137 | b"rootglob" => Ok(PatternSyntax::RootGlob), |
|
137 | 138 | _ => Err(PatternError::UnsupportedSyntax( |
|
138 | 139 | String::from_utf8_lossy(kind).to_string(), |
|
139 | 140 | )), |
|
140 | 141 | } |
|
141 | 142 | } |
|
142 | 143 | |
|
143 | 144 | /// Builds the regex that corresponds to the given pattern. |
|
144 | 145 | /// If within a `syntax: regexp` context, returns the pattern, |
|
145 | 146 | /// otherwise, returns the corresponding regex. |
|
146 | 147 | fn _build_single_regex( |
|
147 | 148 | syntax: PatternSyntax, |
|
148 | 149 | pattern: &[u8], |
|
149 | 150 | globsuffix: &[u8], |
|
150 | 151 | ) -> Vec<u8> { |
|
151 | 152 | if pattern.is_empty() { |
|
152 | 153 | return vec![]; |
|
153 | 154 | } |
|
154 | 155 | match syntax { |
|
155 | 156 | PatternSyntax::Regexp => pattern.to_owned(), |
|
156 | 157 | PatternSyntax::RelRegexp => { |
|
157 | 158 | if pattern[0] == b'^' { |
|
158 | 159 | return pattern.to_owned(); |
|
159 | 160 | } |
|
160 | 161 | [b".*", pattern].concat() |
|
161 | 162 | } |
|
162 | 163 | PatternSyntax::Path | PatternSyntax::RelPath => { |
|
163 | 164 | if pattern == b"." { |
|
164 | 165 | return vec![]; |
|
165 | 166 | } |
|
166 | 167 | [escape_pattern(pattern).as_slice(), b"(?:/|$)"].concat() |
|
167 | 168 | } |
|
168 | 169 | PatternSyntax::RootFiles => { |
|
169 | 170 | let mut res = if pattern == b"." { |
|
170 | 171 | vec![] |
|
171 | 172 | } else { |
|
172 | 173 | // Pattern is a directory name. |
|
173 | 174 | [escape_pattern(pattern).as_slice(), b"/"].concat() |
|
174 | 175 | }; |
|
175 | 176 | |
|
176 | 177 | // Anything after the pattern must be a non-directory. |
|
177 | 178 | res.extend(b"[^/]+$"); |
|
178 | 179 | res |
|
179 | 180 | } |
|
180 | 181 | PatternSyntax::RelGlob => { |
|
181 | 182 | let glob_re = glob_to_re(pattern); |
|
182 | 183 | if let Some(rest) = glob_re.drop_prefix(b"[^/]*") { |
|
183 | 184 | [b".*", rest, globsuffix].concat() |
|
184 | 185 | } else { |
|
185 | 186 | [b"(?:|.*/)", glob_re.as_slice(), globsuffix].concat() |
|
186 | 187 | } |
|
187 | 188 | } |
|
188 | 189 | PatternSyntax::Glob | PatternSyntax::RootGlob => { |
|
189 | 190 | [glob_to_re(pattern).as_slice(), globsuffix].concat() |
|
190 | 191 | } |
|
191 | 192 | } |
|
192 | 193 | } |
|
193 | 194 | |
|
194 | 195 | const GLOB_SPECIAL_CHARACTERS: [u8; 7] = |
|
195 | 196 | [b'*', b'?', b'[', b']', b'{', b'}', b'\\']; |
|
196 | 197 | |
|
197 | 198 | /// Wrapper function to `_build_single_regex` that short-circuits 'exact' globs |
|
198 | 199 | /// that don't need to be transformed into a regex. |
|
199 | 200 | pub fn build_single_regex( |
|
200 | 201 | kind: &[u8], |
|
201 | 202 | pat: &[u8], |
|
202 | 203 | globsuffix: &[u8], |
|
203 | 204 | ) -> Result<Vec<u8>, PatternError> { |
|
204 | 205 | let enum_kind = parse_pattern_syntax(kind)?; |
|
205 | 206 | if enum_kind == PatternSyntax::RootGlob |
|
206 | 207 | && !pat.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b)) |
|
207 | 208 | { |
|
208 | 209 | let mut escaped = escape_pattern(pat); |
|
209 | 210 | escaped.extend(b"(?:/|$)"); |
|
210 | 211 | Ok(escaped) |
|
211 | 212 | } else { |
|
212 | 213 | Ok(_build_single_regex(enum_kind, pat, globsuffix)) |
|
213 | 214 | } |
|
214 | 215 | } |
|
215 | 216 | |
|
216 | 217 | lazy_static! { |
|
217 | static ref SYNTAXES: HashMap<&'static [u8], &'static [u8]> = { | |
|
218 | let mut m = HashMap::new(); |

218 | static ref SYNTAXES: FastHashMap<&'static [u8], &'static [u8]> = { | |
|
219 | let mut m = FastHashMap::default(); | |
|
219 | 220 | |
|
220 | 221 | m.insert(b"re".as_ref(), b"relre:".as_ref()); |
|
221 | 222 | m.insert(b"regexp".as_ref(), b"relre:".as_ref()); |
|
222 | 223 | m.insert(b"glob".as_ref(), b"relglob:".as_ref()); |
|
223 | 224 | m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref()); |
|
224 | 225 | m.insert(b"include".as_ref(), b"include".as_ref()); |
|
225 | 226 | m.insert(b"subinclude".as_ref(), b"subinclude".as_ref()); |
|
226 | 227 | m |
|
227 | 228 | }; |
|
228 | 229 | } |
|
229 | 230 | |
|
230 | 231 | pub type PatternTuple = (Vec<u8>, LineNumber, Vec<u8>); |
|
231 | 232 | type WarningTuple = (PathBuf, Vec<u8>); |
|
232 | 233 | |
|
233 | 234 | pub fn parse_pattern_file_contents<P: AsRef<Path>>( |
|
234 | 235 | lines: &[u8], |
|
235 | 236 | file_path: P, |
|
236 | 237 | warn: bool, |
|
237 | 238 | ) -> (Vec<PatternTuple>, Vec<WarningTuple>) { |
|
238 | 239 | let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap(); |
|
239 | 240 | let comment_escape_regex = Regex::new(r"\\#").unwrap(); |
|
240 | 241 | let mut inputs: Vec<PatternTuple> = vec![]; |
|
241 | 242 | let mut warnings: Vec<WarningTuple> = vec![]; |
|
242 | 243 | |
|
243 | 244 | let mut current_syntax = b"relre:".as_ref(); |
|
244 | 245 | |
|
245 | 246 | for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() { |
|
246 | 247 | let line_number = line_number + 1; |
|
247 | 248 | |
|
248 | 249 | let line_buf; |
|
249 | 250 | if line.contains(&b'#') { |
|
250 | 251 | if let Some(cap) = comment_regex.captures(line) { |
|
251 | 252 | line = &line[..cap.get(1).unwrap().end()] |
|
252 | 253 | } |
|
253 | 254 | line_buf = comment_escape_regex.replace_all(line, NoExpand(b"#")); |
|
254 | 255 | line = &line_buf; |
|
255 | 256 | } |
|
256 | 257 | |
|
257 | 258 | let mut line = line.trim_end(); |
|
258 | 259 | |
|
259 | 260 | if line.is_empty() { |
|
260 | 261 | continue; |
|
261 | 262 | } |
|
262 | 263 | |
|
263 | 264 | if let Some(syntax) = line.drop_prefix(b"syntax:") { |
|
264 | 265 | let syntax = syntax.trim(); |
|
265 | 266 | |
|
266 | 267 | if let Some(rel_syntax) = SYNTAXES.get(syntax) { |
|
267 | 268 | current_syntax = rel_syntax; |
|
268 | 269 | } else if warn { |
|
269 | 270 | warnings |
|
270 | 271 | .push((file_path.as_ref().to_owned(), syntax.to_owned())); |
|
271 | 272 | } |
|
272 | 273 | continue; |
|
273 | 274 | } |
|
274 | 275 | |
|
275 | 276 | let mut line_syntax: &[u8] = ¤t_syntax; |
|
276 | 277 | |
|
277 | 278 | for (s, rels) in SYNTAXES.iter() { |
|
278 | 279 | if let Some(rest) = line.drop_prefix(rels) { |
|
279 | 280 | line_syntax = rels; |
|
280 | 281 | line = rest; |
|
281 | 282 | break; |
|
282 | 283 | } |
|
283 | 284 | if let Some(rest) = line.drop_prefix(&[s, &b":"[..]].concat()) { |
|
284 | 285 | line_syntax = rels; |
|
285 | 286 | line = rest; |
|
286 | 287 | break; |
|
287 | 288 | } |
|
288 | 289 | } |
|
289 | 290 | |
|
290 | 291 | inputs.push(( |
|
291 | 292 | [line_syntax, line].concat(), |
|
292 | 293 | line_number, |
|
293 | 294 | line.to_owned(), |
|
294 | 295 | )); |
|
295 | 296 | } |
|
296 | 297 | (inputs, warnings) |
|
297 | 298 | } |
|
298 | 299 | |
|
299 | 300 | pub fn read_pattern_file<P: AsRef<Path>>( |
|
300 | 301 | file_path: P, |
|
301 | 302 | warn: bool, |
|
302 | 303 | ) -> Result<(Vec<PatternTuple>, Vec<WarningTuple>), PatternFileError> { |
|
303 | 304 | let mut f = File::open(file_path.as_ref())?; |
|
304 | 305 | let mut contents = Vec::new(); |
|
305 | 306 | |
|
306 | 307 | f.read_to_end(&mut contents)?; |
|
307 | 308 | |
|
308 | 309 | Ok(parse_pattern_file_contents(&contents, file_path, warn)) |
|
309 | 310 | } |
|
310 | 311 | |
|
311 | 312 | #[cfg(test)] |
|
312 | 313 | mod tests { |
|
313 | 314 | use super::*; |
|
314 | 315 | |
|
315 | 316 | #[test] |
|
316 | 317 | fn escape_pattern_test() { |
|
317 | 318 | let untouched = br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#; |
|
318 | 319 | assert_eq!(escape_pattern(untouched), untouched.to_vec()); |
|
319 | 320 | // All escape codes |
|
320 | 321 | assert_eq!( |
|
321 | 322 | escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#), |
|
322 | 323 | br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"# |
|
323 | 324 | .to_vec() |
|
324 | 325 | ); |
|
325 | 326 | } |
|
326 | 327 | |
|
327 | 328 | #[test] |
|
328 | 329 | fn glob_test() { |
|
329 | 330 | assert_eq!(glob_to_re(br#"?"#), br#"."#); |
|
330 | 331 | assert_eq!(glob_to_re(br#"*"#), br#"[^/]*"#); |
|
331 | 332 | assert_eq!(glob_to_re(br#"**"#), br#".*"#); |
|
332 | 333 | assert_eq!(glob_to_re(br#"**/a"#), br#"(?:.*/)?a"#); |
|
333 | 334 | assert_eq!(glob_to_re(br#"a/**/b"#), br#"a/(?:.*/)?b"#); |
|
334 | 335 | assert_eq!(glob_to_re(br#"[a*?!^][^b][!c]"#), br#"[a*?!^][\^b][^c]"#); |
|
335 | 336 | assert_eq!(glob_to_re(br#"{a,b}"#), br#"(?:a|b)"#); |
|
336 | 337 | assert_eq!(glob_to_re(br#".\*\?"#), br#"\.\*\?"#); |
|
337 | 338 | } |
|
338 | 339 | |
|
339 | 340 | #[test] |
|
340 | 341 | fn test_parse_pattern_file_contents() { |
|
341 | 342 | let lines = b"syntax: glob\n*.elc"; |
|
342 | 343 | |
|
343 | 344 | assert_eq!( |
|
344 | 345 | vec![(b"relglob:*.elc".to_vec(), 2, b"*.elc".to_vec())], |
|
345 | 346 | parse_pattern_file_contents(lines, Path::new("file_path"), false) |
|
346 | 347 | .0, |
|
347 | 348 | ); |
|
348 | 349 | |
|
349 | 350 | let lines = b"syntax: include\nsyntax: glob"; |
|
350 | 351 | |
|
351 | 352 | assert_eq!( |
|
352 | 353 | parse_pattern_file_contents(lines, Path::new("file_path"), false) |
|
353 | 354 | .0, |
|
354 | 355 | vec![] |
|
355 | 356 | ); |
|
356 | 357 | let lines = b"glob:**.o"; |
|
357 | 358 | assert_eq!( |
|
358 | 359 | parse_pattern_file_contents(lines, Path::new("file_path"), false) |
|
359 | 360 | .0, |
|
360 | 361 | vec![(b"relglob:**.o".to_vec(), 1, b"**.o".to_vec())] |
|
361 | 362 | ); |
|
362 | 363 | } |
|
363 | 364 | |
|
364 | 365 | #[test] |
|
365 | 366 | fn test_build_single_regex_shortcut() { |
|
366 | 367 | assert_eq!( |
|
367 | 368 | br"(?:/|$)".to_vec(), |
|
368 | 369 | build_single_regex(b"rootglob", b"", b"").unwrap() |
|
369 | 370 | ); |
|
370 | 371 | assert_eq!( |
|
371 | 372 | br"whatever(?:/|$)".to_vec(), |
|
372 | 373 | build_single_regex(b"rootglob", b"whatever", b"").unwrap() |
|
373 | 374 | ); |
|
374 | 375 | assert_eq!( |
|
375 | 376 | br"[^/]*\.o".to_vec(), |
|
376 | 377 | build_single_regex(b"rootglob", b"*.o", b"").unwrap() |
|
377 | 378 | ); |
|
378 | 379 | } |
|
379 | 380 | } |
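
The rootglob shortcut exercised by test_build_single_regex_shortcut above turns a pattern with no glob metacharacters into the escaped literal followed by (?:/|$), i.e. a match at a path-component boundary. A minimal standalone sketch of what such a byte regex accepts, assuming only the `regex` crate this module already uses (the file names below are illustrative):

    use regex::bytes::Regex;

    fn main() {
        // "whatever" contains no glob metacharacters, so the generated
        // pattern is the escaped literal plus the "(?:/|$)" suffix.
        let re = Regex::new(r"whatever(?:/|$)").unwrap();
        assert!(re.is_match(b"whatever"));         // exact name
        assert!(re.is_match(b"whatever/file.rs")); // directory prefix
        assert!(!re.is_match(b"whatever.txt"));    // different file
    }
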
@@ -1,166 +1,173 | |||
|
1 | 1 | // Copyright 2018 Georges Racinet <gracinet@anybox.fr> |
|
2 | 2 | // |
|
3 | 3 | // This software may be used and distributed according to the terms of the |
|
4 | 4 | // GNU General Public License version 2 or any later version. |
|
5 | 5 | mod ancestors; |
|
6 | 6 | pub mod dagops; |
|
7 | 7 | pub use ancestors::{AncestorsIterator, LazyAncestors, MissingAncestors}; |
|
8 | 8 | mod dirstate; |
|
9 | 9 | pub mod discovery; |
|
10 | 10 | pub mod testing; // unconditionally built, for use from integration tests |
|
11 | 11 | pub use dirstate::{ |
|
12 | 12 | dirs_multiset::{DirsMultiset, DirsMultisetIter}, |
|
13 | 13 | dirstate_map::DirstateMap, |
|
14 | 14 | parsers::{pack_dirstate, parse_dirstate, PARENT_SIZE}, |
|
15 | 15 | status::status, |
|
16 | 16 | CopyMap, CopyMapIter, DirstateEntry, DirstateParents, EntryState, |
|
17 | 17 | StateMap, StateMapIter, |
|
18 | 18 | }; |
|
19 | 19 | mod filepatterns; |
|
20 | 20 | pub mod matchers; |
|
21 | 21 | pub mod utils; |
|
22 | 22 | |
|
23 | 23 | use crate::utils::hg_path::HgPathBuf; |
|
24 | 24 | pub use filepatterns::{ |
|
25 | 25 | build_single_regex, read_pattern_file, PatternSyntax, PatternTuple, |
|
26 | 26 | }; |
|
27 | use std::collections::HashMap; | |
|
28 | use twox_hash::RandomXxHashBuilder64; | |
|
27 | 29 | |
|
28 | 30 | /// Mercurial revision numbers |
|
29 | 31 | /// |
|
30 | 32 | /// As noted in revlog.c, revision numbers are actually encoded in |
|
31 | 33 | /// 4 bytes, and are liberally converted to ints, whence the i32 |
|
32 | 34 | pub type Revision = i32; |
|
33 | 35 | |
|
34 | 36 | /// Marker expressing the absence of a parent |
|
35 | 37 | /// |
|
36 | 38 | /// Independently of the actual representation, `NULL_REVISION` is guaranteed |
|
37 | 39 | /// to be smaller than all existing revisions. |
|
38 | 40 | pub const NULL_REVISION: Revision = -1; |
|
39 | 41 | |
|
40 | 42 | /// Same as `mercurial.node.wdirrev` |
|
41 | 43 | /// |
|
42 | 44 | /// This is also equal to `i32::max_value()`, but it's better to spell |
|
43 | 45 | /// it out explicitly, same as in `mercurial.node` |
|
44 | 46 | pub const WORKING_DIRECTORY_REVISION: Revision = 0x7fffffff; |
|
45 | 47 | |
|
46 | 48 | /// The simplest expression of what we need of Mercurial DAGs. |
|
47 | 49 | pub trait Graph { |
|
48 | 50 | /// Return the two parents of the given `Revision`. |
|
49 | 51 | /// |
|
50 | 52 | /// Each of the parents can be independently `NULL_REVISION` |
|
51 | 53 | fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError>; |
|
52 | 54 | } |
|
53 | 55 | |
|
54 | 56 | pub type LineNumber = usize; |
|
55 | 57 | |
|
58 | /// Rust's default hasher is too slow because it tries to prevent collision | |
|
59 | /// attacks. We are not concerned about those: if an ill-minded person has | |
|
60 | /// write access to your repository, you have other issues. | |
|
61 | pub type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>; | |
|
62 | ||
|
56 | 63 | #[derive(Clone, Debug, PartialEq)] |
|
57 | 64 | pub enum GraphError { |
|
58 | 65 | ParentOutOfRange(Revision), |
|
59 | 66 | WorkingDirectoryUnsupported, |
|
60 | 67 | } |
|
61 | 68 | |
|
62 | 69 | #[derive(Clone, Debug, PartialEq)] |
|
63 | 70 | pub enum DirstateParseError { |
|
64 | 71 | TooLittleData, |
|
65 | 72 | Overflow, |
|
66 | 73 | CorruptedEntry(String), |
|
67 | 74 | Damaged, |
|
68 | 75 | } |
|
69 | 76 | |
|
70 | 77 | impl From<std::io::Error> for DirstateParseError { |
|
71 | 78 | fn from(e: std::io::Error) -> Self { |
|
72 | 79 | DirstateParseError::CorruptedEntry(e.to_string()) |
|
73 | 80 | } |
|
74 | 81 | } |
|
75 | 82 | |
|
76 | 83 | impl ToString for DirstateParseError { |
|
77 | 84 | fn to_string(&self) -> String { |
|
78 | 85 | use crate::DirstateParseError::*; |
|
79 | 86 | match self { |
|
80 | 87 | TooLittleData => "Too little data for dirstate.".to_string(), |
|
81 | 88 | Overflow => "Overflow in dirstate.".to_string(), |
|
82 | 89 | CorruptedEntry(e) => format!("Corrupted entry: {:?}.", e), |
|
83 | 90 | Damaged => "Dirstate appears to be damaged.".to_string(), |
|
84 | 91 | } |
|
85 | 92 | } |
|
86 | 93 | } |
|
87 | 94 | |
|
88 | 95 | #[derive(Debug, PartialEq)] |
|
89 | 96 | pub enum DirstatePackError { |
|
90 | 97 | CorruptedEntry(String), |
|
91 | 98 | CorruptedParent, |
|
92 | 99 | BadSize(usize, usize), |
|
93 | 100 | } |
|
94 | 101 | |
|
95 | 102 | impl From<std::io::Error> for DirstatePackError { |
|
96 | 103 | fn from(e: std::io::Error) -> Self { |
|
97 | 104 | DirstatePackError::CorruptedEntry(e.to_string()) |
|
98 | 105 | } |
|
99 | 106 | } |
|
100 | 107 | #[derive(Debug, PartialEq)] |
|
101 | 108 | pub enum DirstateMapError { |
|
102 | 109 | PathNotFound(HgPathBuf), |
|
103 | 110 | EmptyPath, |
|
104 | 111 | ConsecutiveSlashes, |
|
105 | 112 | } |
|
106 | 113 | |
|
107 | 114 | impl ToString for DirstateMapError { |
|
108 | 115 | fn to_string(&self) -> String { |
|
109 | 116 | use crate::DirstateMapError::*; |
|
110 | 117 | match self { |
|
111 | 118 | PathNotFound(_) => "expected a value, found none".to_string(), |
|
112 | 119 | EmptyPath => "Overflow in dirstate.".to_string(), |
|
113 | 120 | ConsecutiveSlashes => { |
|
114 | 121 | "found invalid consecutive slashes in path".to_string() |
|
115 | 122 | } |
|
116 | 123 | } |
|
117 | 124 | } |
|
118 | 125 | } |
|
119 | 126 | |
|
120 | 127 | pub enum DirstateError { |
|
121 | 128 | Parse(DirstateParseError), |
|
122 | 129 | Pack(DirstatePackError), |
|
123 | 130 | Map(DirstateMapError), |
|
124 | 131 | IO(std::io::Error), |
|
125 | 132 | } |
|
126 | 133 | |
|
127 | 134 | impl From<DirstateParseError> for DirstateError { |
|
128 | 135 | fn from(e: DirstateParseError) -> Self { |
|
129 | 136 | DirstateError::Parse(e) |
|
130 | 137 | } |
|
131 | 138 | } |
|
132 | 139 | |
|
133 | 140 | impl From<DirstatePackError> for DirstateError { |
|
134 | 141 | fn from(e: DirstatePackError) -> Self { |
|
135 | 142 | DirstateError::Pack(e) |
|
136 | 143 | } |
|
137 | 144 | } |
|
138 | 145 | |
|
139 | 146 | #[derive(Debug)] |
|
140 | 147 | pub enum PatternError { |
|
141 | 148 | UnsupportedSyntax(String), |
|
142 | 149 | } |
|
143 | 150 | |
|
144 | 151 | #[derive(Debug)] |
|
145 | 152 | pub enum PatternFileError { |
|
146 | 153 | IO(std::io::Error), |
|
147 | 154 | Pattern(PatternError, LineNumber), |
|
148 | 155 | } |
|
149 | 156 | |
|
150 | 157 | impl From<std::io::Error> for PatternFileError { |
|
151 | 158 | fn from(e: std::io::Error) -> Self { |
|
152 | 159 | PatternFileError::IO(e) |
|
153 | 160 | } |
|
154 | 161 | } |
|
155 | 162 | |
|
156 | 163 | impl From<DirstateMapError> for DirstateError { |
|
157 | 164 | fn from(e: DirstateMapError) -> Self { |
|
158 | 165 | DirstateError::Map(e) |
|
159 | 166 | } |
|
160 | 167 | } |
|
161 | 168 | |
|
162 | 169 | impl From<std::io::Error> for DirstateError { |
|
163 | 170 | fn from(e: std::io::Error) -> Self { |
|
164 | 171 | DirstateError::IO(e) |
|
165 | 172 | } |
|
166 | 173 | } |
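
The FastHashMap alias added in this file is why construction sites elsewhere in the series switch from HashMap::new() to FastHashMap::default(): HashMap::new() is only defined for the default RandomState hasher, while a map with a custom BuildHasher is obtained through Default (or HashMap::with_hasher). A minimal sketch of the same pattern, relying only on the twox_hash import introduced above; the key and value types are illustrative:

    use std::collections::HashMap;
    use twox_hash::RandomXxHashBuilder64;

    // Same shape as the alias above: a HashMap keyed through a fast,
    // non-DoS-resistant hasher.
    type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>;

    fn main() {
        // `FastHashMap::new()` would not compile; the custom hasher comes
        // in through the `Default` implementation.
        let mut m: FastHashMap<&[u8], &[u8]> = FastHashMap::default();
        m.insert(b"re".as_ref(), b"relre:".as_ref());
        assert_eq!(m.get(b"re".as_ref()), Some(&b"relre:".as_ref()));
    }
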
@@ -1,175 +1,175 | |||
|
1 | 1 | // parsers.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | //! Bindings for the `hg::dirstate::parsers` module provided by the |
|
9 | 9 | //! `hg-core` package. |
|
10 | 10 | //! |
|
11 | 11 | //! From Python, this will be seen as `mercurial.rustext.parsers` |
|
12 | 12 | use cpython::{ |
|
13 | 13 | exc, PyBytes, PyDict, PyErr, PyInt, PyModule, PyResult, PyTuple, Python, |
|
14 | 14 | PythonObject, ToPyObject, |
|
15 | 15 | }; |
|
16 | 16 | use hg::{ |
|
17 | 17 | pack_dirstate, parse_dirstate, utils::hg_path::HgPathBuf, |
|
18 | DirstatePackError, DirstateParents, DirstateParseError, PARENT_SIZE, |

18 | DirstatePackError, DirstateParents, DirstateParseError, FastHashMap, | |
|
19 | PARENT_SIZE, | |
|
19 | 20 | }; |
|
20 | use std::collections::HashMap; | |
|
21 | 21 | use std::convert::TryInto; |
|
22 | 22 | |
|
23 | 23 | use crate::dirstate::{extract_dirstate, make_dirstate_tuple}; |
|
24 | 24 | use std::time::Duration; |
|
25 | 25 | |
|
26 | 26 | fn parse_dirstate_wrapper( |
|
27 | 27 | py: Python, |
|
28 | 28 | dmap: PyDict, |
|
29 | 29 | copymap: PyDict, |
|
30 | 30 | st: PyBytes, |
|
31 | 31 | ) -> PyResult<PyTuple> { |
|
32 | let mut dirstate_map = HashMap::new(); |

33 | let mut copies = HashMap::new(); |

32 | let mut dirstate_map = FastHashMap::default(); | |
|
33 | let mut copies = FastHashMap::default(); | |
|
34 | 34 | |
|
35 | 35 | match parse_dirstate(&mut dirstate_map, &mut copies, st.data(py)) { |
|
36 | 36 | Ok(parents) => { |
|
37 | 37 | for (filename, entry) in &dirstate_map { |
|
38 | 38 | dmap.set_item( |
|
39 | 39 | py, |
|
40 | 40 | PyBytes::new(py, filename.as_ref()), |
|
41 | 41 | make_dirstate_tuple(py, entry)?, |
|
42 | 42 | )?; |
|
43 | 43 | } |
|
44 | 44 | for (path, copy_path) in copies { |
|
45 | 45 | copymap.set_item( |
|
46 | 46 | py, |
|
47 | 47 | PyBytes::new(py, path.as_ref()), |
|
48 | 48 | PyBytes::new(py, copy_path.as_ref()), |
|
49 | 49 | )?; |
|
50 | 50 | } |
|
51 | 51 | Ok( |
|
52 | 52 | (PyBytes::new(py, &parents.p1), PyBytes::new(py, &parents.p2)) |
|
53 | 53 | .to_py_object(py), |
|
54 | 54 | ) |
|
55 | 55 | } |
|
56 | 56 | Err(e) => Err(PyErr::new::<exc::ValueError, _>( |
|
57 | 57 | py, |
|
58 | 58 | match e { |
|
59 | 59 | DirstateParseError::TooLittleData => { |
|
60 | 60 | "too little data for parents".to_string() |
|
61 | 61 | } |
|
62 | 62 | DirstateParseError::Overflow => { |
|
63 | 63 | "overflow in dirstate".to_string() |
|
64 | 64 | } |
|
65 | 65 | DirstateParseError::CorruptedEntry(e) => e, |
|
66 | 66 | DirstateParseError::Damaged => { |
|
67 | 67 | "dirstate appears to be damaged".to_string() |
|
68 | 68 | } |
|
69 | 69 | }, |
|
70 | 70 | )), |
|
71 | 71 | } |
|
72 | 72 | } |
|
73 | 73 | |
|
74 | 74 | fn pack_dirstate_wrapper( |
|
75 | 75 | py: Python, |
|
76 | 76 | dmap: PyDict, |
|
77 | 77 | copymap: PyDict, |
|
78 | 78 | pl: PyTuple, |
|
79 | 79 | now: PyInt, |
|
80 | 80 | ) -> PyResult<PyBytes> { |
|
81 | 81 | let p1 = pl.get_item(py, 0).extract::<PyBytes>(py)?; |
|
82 | 82 | let p1: &[u8] = p1.data(py); |
|
83 | 83 | let p2 = pl.get_item(py, 1).extract::<PyBytes>(py)?; |
|
84 | 84 | let p2: &[u8] = p2.data(py); |
|
85 | 85 | |
|
86 | 86 | let mut dirstate_map = extract_dirstate(py, &dmap)?; |
|
87 | 87 | |
|
88 | let copies: Result<HashMap<HgPathBuf, HgPathBuf>, PyErr> = copymap | |
|
88 | let copies: Result<FastHashMap<HgPathBuf, HgPathBuf>, PyErr> = copymap | |
|
89 | 89 | .items(py) |
|
90 | 90 | .iter() |
|
91 | 91 | .map(|(key, value)| { |
|
92 | 92 | Ok(( |
|
93 | 93 | HgPathBuf::from_bytes(key.extract::<PyBytes>(py)?.data(py)), |
|
94 | 94 | HgPathBuf::from_bytes(value.extract::<PyBytes>(py)?.data(py)), |
|
95 | 95 | )) |
|
96 | 96 | }) |
|
97 | 97 | .collect(); |
|
98 | 98 | |
|
99 | 99 | if p1.len() != PARENT_SIZE || p2.len() != PARENT_SIZE { |
|
100 | 100 | return Err(PyErr::new::<exc::ValueError, _>( |
|
101 | 101 | py, |
|
102 | 102 | "expected a 20-byte hash".to_string(), |
|
103 | 103 | )); |
|
104 | 104 | } |
|
105 | 105 | |
|
106 | 106 | match pack_dirstate( |
|
107 | 107 | &mut dirstate_map, |
|
108 | 108 | &copies?, |
|
109 | 109 | DirstateParents { |
|
110 | 110 | p1: p1.try_into().unwrap(), |
|
111 | 111 | p2: p2.try_into().unwrap(), |
|
112 | 112 | }, |
|
113 | 113 | Duration::from_secs(now.as_object().extract::<u64>(py)?), |
|
114 | 114 | ) { |
|
115 | 115 | Ok(packed) => { |
|
116 | 116 | for (filename, entry) in &dirstate_map { |
|
117 | 117 | dmap.set_item( |
|
118 | 118 | py, |
|
119 | 119 | PyBytes::new(py, filename.as_ref()), |
|
120 | 120 | make_dirstate_tuple(py, entry)?, |
|
121 | 121 | )?; |
|
122 | 122 | } |
|
123 | 123 | Ok(PyBytes::new(py, &packed)) |
|
124 | 124 | } |
|
125 | 125 | Err(error) => Err(PyErr::new::<exc::ValueError, _>( |
|
126 | 126 | py, |
|
127 | 127 | match error { |
|
128 | 128 | DirstatePackError::CorruptedParent => { |
|
129 | 129 | "expected a 20-byte hash".to_string() |
|
130 | 130 | } |
|
131 | 131 | DirstatePackError::CorruptedEntry(e) => e, |
|
132 | 132 | DirstatePackError::BadSize(expected, actual) => { |
|
133 | 133 | format!("bad dirstate size: {} != {}", actual, expected) |
|
134 | 134 | } |
|
135 | 135 | }, |
|
136 | 136 | )), |
|
137 | 137 | } |
|
138 | 138 | } |
|
139 | 139 | |
|
140 | 140 | /// Create the module, with `__package__` given from parent |
|
141 | 141 | pub fn init_parsers_module(py: Python, package: &str) -> PyResult<PyModule> { |
|
142 | 142 | let dotted_name = &format!("{}.parsers", package); |
|
143 | 143 | let m = PyModule::new(py, dotted_name)?; |
|
144 | 144 | |
|
145 | 145 | m.add(py, "__package__", package)?; |
|
146 | 146 | m.add(py, "__doc__", "Parsers - Rust implementation")?; |
|
147 | 147 | |
|
148 | 148 | m.add( |
|
149 | 149 | py, |
|
150 | 150 | "parse_dirstate", |
|
151 | 151 | py_fn!( |
|
152 | 152 | py, |
|
153 | 153 | parse_dirstate_wrapper(dmap: PyDict, copymap: PyDict, st: PyBytes) |
|
154 | 154 | ), |
|
155 | 155 | )?; |
|
156 | 156 | m.add( |
|
157 | 157 | py, |
|
158 | 158 | "pack_dirstate", |
|
159 | 159 | py_fn!( |
|
160 | 160 | py, |
|
161 | 161 | pack_dirstate_wrapper( |
|
162 | 162 | dmap: PyDict, |
|
163 | 163 | copymap: PyDict, |
|
164 | 164 | pl: PyTuple, |
|
165 | 165 | now: PyInt |
|
166 | 166 | ) |
|
167 | 167 | ), |
|
168 | 168 | )?; |
|
169 | 169 | |
|
170 | 170 | let sys = PyModule::import(py, "sys")?; |
|
171 | 171 | let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?; |
|
172 | 172 | sys_modules.set_item(py, dotted_name, &m)?; |
|
173 | 173 | |
|
174 | 174 | Ok(m) |
|
175 | 175 | } |
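
The parent handling in pack_dirstate_wrapper above follows a check-then-convert pattern: the wrapper rejects anything that is not exactly PARENT_SIZE bytes, so the later p1.try_into().unwrap() from a byte slice to a fixed-size array cannot fail. A standalone sketch of that pattern without the Python bindings; PARENT_SIZE and to_parent are local stand-ins here, not part of the bound API:

    use std::convert::TryInto;

    const PARENT_SIZE: usize = 20;

    fn to_parent(bytes: &[u8]) -> Result<[u8; PARENT_SIZE], String> {
        if bytes.len() != PARENT_SIZE {
            return Err("expected a 20-byte hash".to_string());
        }
        // The length was checked above, so the slice-to-array conversion
        // always succeeds.
        Ok(bytes.try_into().unwrap())
    }

    fn main() {
        assert!(to_parent(&[0u8; PARENT_SIZE]).is_ok());
        assert!(to_parent(b"too short").is_err());
    }
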