branching: merge default into stable
Raphaël Gomès
r50646:288de6f5 merge 6.2rc0 stable

The requested changes are too big and content was truncated.

@@ -0,0 +1,560 b''
1 # This file is automatically @generated by Cargo.
2 # It is not intended for manual editing.
3 version = 3
4
5 [[package]]
6 name = "aho-corasick"
7 version = "0.7.18"
8 source = "registry+https://github.com/rust-lang/crates.io-index"
9 checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
10 dependencies = [
11 "memchr",
12 ]
13
14 [[package]]
15 name = "assert_cmd"
16 version = "2.0.4"
17 source = "registry+https://github.com/rust-lang/crates.io-index"
18 checksum = "93ae1ddd39efd67689deb1979d80bad3bf7f2b09c6e6117c8d1f2443b5e2f83e"
19 dependencies = [
20 "bstr",
21 "doc-comment",
22 "predicates",
23 "predicates-core",
24 "predicates-tree",
25 "wait-timeout",
26 ]
27
28 [[package]]
29 name = "atty"
30 version = "0.2.14"
31 source = "registry+https://github.com/rust-lang/crates.io-index"
32 checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
33 dependencies = [
34 "hermit-abi",
35 "libc",
36 "winapi",
37 ]
38
39 [[package]]
40 name = "autocfg"
41 version = "1.1.0"
42 source = "registry+https://github.com/rust-lang/crates.io-index"
43 checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
44
45 [[package]]
46 name = "bitflags"
47 version = "1.3.2"
48 source = "registry+https://github.com/rust-lang/crates.io-index"
49 checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
50
51 [[package]]
52 name = "bstr"
53 version = "0.2.17"
54 source = "registry+https://github.com/rust-lang/crates.io-index"
55 checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"
56 dependencies = [
57 "lazy_static",
58 "memchr",
59 "regex-automata",
60 ]
61
62 [[package]]
63 name = "clap"
64 version = "3.1.6"
65 source = "registry+https://github.com/rust-lang/crates.io-index"
66 checksum = "d8c93436c21e4698bacadf42917db28b23017027a4deccb35dbe47a7e7840123"
67 dependencies = [
68 "atty",
69 "bitflags",
70 "clap_derive",
71 "indexmap",
72 "lazy_static",
73 "os_str_bytes",
74 "strsim",
75 "termcolor",
76 "textwrap",
77 ]
78
79 [[package]]
80 name = "clap_derive"
81 version = "3.1.4"
82 source = "registry+https://github.com/rust-lang/crates.io-index"
83 checksum = "da95d038ede1a964ce99f49cbe27a7fb538d1da595e4b4f70b8c8f338d17bf16"
84 dependencies = [
85 "heck",
86 "proc-macro-error",
87 "proc-macro2",
88 "quote",
89 "syn",
90 ]
91
92 [[package]]
93 name = "console"
94 version = "0.15.0"
95 source = "registry+https://github.com/rust-lang/crates.io-index"
96 checksum = "a28b32d32ca44b70c3e4acd7db1babf555fa026e385fb95f18028f88848b3c31"
97 dependencies = [
98 "encode_unicode",
99 "libc",
100 "once_cell",
101 "terminal_size",
102 "winapi",
103 ]
104
105 [[package]]
106 name = "difflib"
107 version = "0.4.0"
108 source = "registry+https://github.com/rust-lang/crates.io-index"
109 checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
110
111 [[package]]
112 name = "doc-comment"
113 version = "0.3.3"
114 source = "registry+https://github.com/rust-lang/crates.io-index"
115 checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
116
117 [[package]]
118 name = "either"
119 version = "1.6.1"
120 source = "registry+https://github.com/rust-lang/crates.io-index"
121 checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
122
123 [[package]]
124 name = "encode_unicode"
125 version = "0.3.6"
126 source = "registry+https://github.com/rust-lang/crates.io-index"
127 checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
128
129 [[package]]
130 name = "fuchsia-cprng"
131 version = "0.1.1"
132 source = "registry+https://github.com/rust-lang/crates.io-index"
133 checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
134
135 [[package]]
136 name = "hashbrown"
137 version = "0.11.2"
138 source = "registry+https://github.com/rust-lang/crates.io-index"
139 checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
140
141 [[package]]
142 name = "heck"
143 version = "0.4.0"
144 source = "registry+https://github.com/rust-lang/crates.io-index"
145 checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
146
147 [[package]]
148 name = "hermit-abi"
149 version = "0.1.19"
150 source = "registry+https://github.com/rust-lang/crates.io-index"
151 checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
152 dependencies = [
153 "libc",
154 ]
155
156 [[package]]
157 name = "indexmap"
158 version = "1.8.0"
159 source = "registry+https://github.com/rust-lang/crates.io-index"
160 checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223"
161 dependencies = [
162 "autocfg",
163 "hashbrown",
164 ]
165
166 [[package]]
167 name = "insta"
168 version = "1.13.0"
169 source = "registry+https://github.com/rust-lang/crates.io-index"
170 checksum = "30a7e1911532a662f6b08b68f884080850f2fd9544963c3ab23a5af42bda1eac"
171 dependencies = [
172 "console",
173 "once_cell",
174 "serde",
175 "serde_json",
176 "serde_yaml",
177 "similar",
178 ]
179
180 [[package]]
181 name = "itertools"
182 version = "0.10.3"
183 source = "registry+https://github.com/rust-lang/crates.io-index"
184 checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
185 dependencies = [
186 "either",
187 ]
188
189 [[package]]
190 name = "itoa"
191 version = "1.0.1"
192 source = "registry+https://github.com/rust-lang/crates.io-index"
193 checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
194
195 [[package]]
196 name = "lazy_static"
197 version = "1.4.0"
198 source = "registry+https://github.com/rust-lang/crates.io-index"
199 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
200
201 [[package]]
202 name = "libc"
203 version = "0.2.119"
204 source = "registry+https://github.com/rust-lang/crates.io-index"
205 checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4"
206
207 [[package]]
208 name = "linked-hash-map"
209 version = "0.5.4"
210 source = "registry+https://github.com/rust-lang/crates.io-index"
211 checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
212
213 [[package]]
214 name = "memchr"
215 version = "2.4.1"
216 source = "registry+https://github.com/rust-lang/crates.io-index"
217 checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
218
219 [[package]]
220 name = "merge-lists"
221 version = "0.1.0"
222 dependencies = [
223 "assert_cmd",
224 "clap",
225 "insta",
226 "itertools",
227 "regex",
228 "similar",
229 "tempdir",
230 ]
231
232 [[package]]
233 name = "once_cell"
234 version = "1.10.0"
235 source = "registry+https://github.com/rust-lang/crates.io-index"
236 checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
237
238 [[package]]
239 name = "os_str_bytes"
240 version = "6.0.0"
241 source = "registry+https://github.com/rust-lang/crates.io-index"
242 checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
243 dependencies = [
244 "memchr",
245 ]
246
247 [[package]]
248 name = "predicates"
249 version = "2.1.1"
250 source = "registry+https://github.com/rust-lang/crates.io-index"
251 checksum = "a5aab5be6e4732b473071984b3164dbbfb7a3674d30ea5ff44410b6bcd960c3c"
252 dependencies = [
253 "difflib",
254 "itertools",
255 "predicates-core",
256 ]
257
258 [[package]]
259 name = "predicates-core"
260 version = "1.0.3"
261 source = "registry+https://github.com/rust-lang/crates.io-index"
262 checksum = "da1c2388b1513e1b605fcec39a95e0a9e8ef088f71443ef37099fa9ae6673fcb"
263
264 [[package]]
265 name = "predicates-tree"
266 version = "1.0.5"
267 source = "registry+https://github.com/rust-lang/crates.io-index"
268 checksum = "4d86de6de25020a36c6d3643a86d9a6a9f552107c0559c60ea03551b5e16c032"
269 dependencies = [
270 "predicates-core",
271 "termtree",
272 ]
273
274 [[package]]
275 name = "proc-macro-error"
276 version = "1.0.4"
277 source = "registry+https://github.com/rust-lang/crates.io-index"
278 checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
279 dependencies = [
280 "proc-macro-error-attr",
281 "proc-macro2",
282 "quote",
283 "syn",
284 "version_check",
285 ]
286
287 [[package]]
288 name = "proc-macro-error-attr"
289 version = "1.0.4"
290 source = "registry+https://github.com/rust-lang/crates.io-index"
291 checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
292 dependencies = [
293 "proc-macro2",
294 "quote",
295 "version_check",
296 ]
297
298 [[package]]
299 name = "proc-macro2"
300 version = "1.0.36"
301 source = "registry+https://github.com/rust-lang/crates.io-index"
302 checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
303 dependencies = [
304 "unicode-xid",
305 ]
306
307 [[package]]
308 name = "quote"
309 version = "1.0.15"
310 source = "registry+https://github.com/rust-lang/crates.io-index"
311 checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
312 dependencies = [
313 "proc-macro2",
314 ]
315
316 [[package]]
317 name = "rand"
318 version = "0.4.6"
319 source = "registry+https://github.com/rust-lang/crates.io-index"
320 checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293"
321 dependencies = [
322 "fuchsia-cprng",
323 "libc",
324 "rand_core 0.3.1",
325 "rdrand",
326 "winapi",
327 ]
328
329 [[package]]
330 name = "rand_core"
331 version = "0.3.1"
332 source = "registry+https://github.com/rust-lang/crates.io-index"
333 checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
334 dependencies = [
335 "rand_core 0.4.2",
336 ]
337
338 [[package]]
339 name = "rand_core"
340 version = "0.4.2"
341 source = "registry+https://github.com/rust-lang/crates.io-index"
342 checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
343
344 [[package]]
345 name = "rdrand"
346 version = "0.4.0"
347 source = "registry+https://github.com/rust-lang/crates.io-index"
348 checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
349 dependencies = [
350 "rand_core 0.3.1",
351 ]
352
353 [[package]]
354 name = "regex"
355 version = "1.5.5"
356 source = "registry+https://github.com/rust-lang/crates.io-index"
357 checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
358 dependencies = [
359 "aho-corasick",
360 "memchr",
361 "regex-syntax",
362 ]
363
364 [[package]]
365 name = "regex-automata"
366 version = "0.1.10"
367 source = "registry+https://github.com/rust-lang/crates.io-index"
368 checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
369
370 [[package]]
371 name = "regex-syntax"
372 version = "0.6.25"
373 source = "registry+https://github.com/rust-lang/crates.io-index"
374 checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
375
376 [[package]]
377 name = "remove_dir_all"
378 version = "0.5.3"
379 source = "registry+https://github.com/rust-lang/crates.io-index"
380 checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
381 dependencies = [
382 "winapi",
383 ]
384
385 [[package]]
386 name = "ryu"
387 version = "1.0.9"
388 source = "registry+https://github.com/rust-lang/crates.io-index"
389 checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
390
391 [[package]]
392 name = "serde"
393 version = "1.0.136"
394 source = "registry+https://github.com/rust-lang/crates.io-index"
395 checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
396 dependencies = [
397 "serde_derive",
398 ]
399
400 [[package]]
401 name = "serde_derive"
402 version = "1.0.136"
403 source = "registry+https://github.com/rust-lang/crates.io-index"
404 checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
405 dependencies = [
406 "proc-macro2",
407 "quote",
408 "syn",
409 ]
410
411 [[package]]
412 name = "serde_json"
413 version = "1.0.79"
414 source = "registry+https://github.com/rust-lang/crates.io-index"
415 checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
416 dependencies = [
417 "itoa",
418 "ryu",
419 "serde",
420 ]
421
422 [[package]]
423 name = "serde_yaml"
424 version = "0.8.23"
425 source = "registry+https://github.com/rust-lang/crates.io-index"
426 checksum = "a4a521f2940385c165a24ee286aa8599633d162077a54bdcae2a6fd5a7bfa7a0"
427 dependencies = [
428 "indexmap",
429 "ryu",
430 "serde",
431 "yaml-rust",
432 ]
433
434 [[package]]
435 name = "similar"
436 version = "2.1.0"
437 source = "registry+https://github.com/rust-lang/crates.io-index"
438 checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3"
439 dependencies = [
440 "bstr",
441 ]
442
443 [[package]]
444 name = "strsim"
445 version = "0.10.0"
446 source = "registry+https://github.com/rust-lang/crates.io-index"
447 checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
448
449 [[package]]
450 name = "syn"
451 version = "1.0.87"
452 source = "registry+https://github.com/rust-lang/crates.io-index"
453 checksum = "1e59d925cf59d8151f25a3bedf97c9c157597c9df7324d32d68991cc399ed08b"
454 dependencies = [
455 "proc-macro2",
456 "quote",
457 "unicode-xid",
458 ]
459
460 [[package]]
461 name = "tempdir"
462 version = "0.3.7"
463 source = "registry+https://github.com/rust-lang/crates.io-index"
464 checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
465 dependencies = [
466 "rand",
467 "remove_dir_all",
468 ]
469
470 [[package]]
471 name = "termcolor"
472 version = "1.1.3"
473 source = "registry+https://github.com/rust-lang/crates.io-index"
474 checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
475 dependencies = [
476 "winapi-util",
477 ]
478
479 [[package]]
480 name = "terminal_size"
481 version = "0.1.17"
482 source = "registry+https://github.com/rust-lang/crates.io-index"
483 checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df"
484 dependencies = [
485 "libc",
486 "winapi",
487 ]
488
489 [[package]]
490 name = "termtree"
491 version = "0.2.4"
492 source = "registry+https://github.com/rust-lang/crates.io-index"
493 checksum = "507e9898683b6c43a9aa55b64259b721b52ba226e0f3779137e50ad114a4c90b"
494
495 [[package]]
496 name = "textwrap"
497 version = "0.15.0"
498 source = "registry+https://github.com/rust-lang/crates.io-index"
499 checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
500
501 [[package]]
502 name = "unicode-xid"
503 version = "0.2.2"
504 source = "registry+https://github.com/rust-lang/crates.io-index"
505 checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
506
507 [[package]]
508 name = "version_check"
509 version = "0.9.4"
510 source = "registry+https://github.com/rust-lang/crates.io-index"
511 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
512
513 [[package]]
514 name = "wait-timeout"
515 version = "0.2.0"
516 source = "registry+https://github.com/rust-lang/crates.io-index"
517 checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
518 dependencies = [
519 "libc",
520 ]
521
522 [[package]]
523 name = "winapi"
524 version = "0.3.9"
525 source = "registry+https://github.com/rust-lang/crates.io-index"
526 checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
527 dependencies = [
528 "winapi-i686-pc-windows-gnu",
529 "winapi-x86_64-pc-windows-gnu",
530 ]
531
532 [[package]]
533 name = "winapi-i686-pc-windows-gnu"
534 version = "0.4.0"
535 source = "registry+https://github.com/rust-lang/crates.io-index"
536 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
537
538 [[package]]
539 name = "winapi-util"
540 version = "0.1.5"
541 source = "registry+https://github.com/rust-lang/crates.io-index"
542 checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
543 dependencies = [
544 "winapi",
545 ]
546
547 [[package]]
548 name = "winapi-x86_64-pc-windows-gnu"
549 version = "0.4.0"
550 source = "registry+https://github.com/rust-lang/crates.io-index"
551 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
552
553 [[package]]
554 name = "yaml-rust"
555 version = "0.4.5"
556 source = "registry+https://github.com/rust-lang/crates.io-index"
557 checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
558 dependencies = [
559 "linked-hash-map",
560 ]
@@ -0,0 +1,21 b''
1 # A tool that performs a 3-way merge, resolving conflicts in sorted lists and
2 # leaving other conflicts unchanged. This is useful with Mercurial's support
3 # for partial merge tools (configured in `[partial-merge-tools]`).
4
5 [package]
6 name = "merge-lists"
7 version = "0.1.0"
8 edition = "2021"
9 # We need https://github.com/rust-lang/rust/pull/89825
10 rust-version = "1.59"
11
12 [dependencies]
13 clap = { version = "3.1.6", features = ["derive"] }
14 itertools = "0.10.3"
15 regex = "1.5.5"
16 similar = { version="2.1.0", features = ["bytes"] }
17
18 [dev-dependencies]
19 assert_cmd = "2.0.4"
20 insta = "1.13.0"
21 tempdir = "0.3.7"
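The comment at the top of this Cargo.toml points at Mercurial's [partial-merge-tools] configuration section. As a rough sketch only (the option names, executable path, file pattern and $local/$base/$other placeholders below are assumptions for illustration, not part of this changeset; Mercurial's configuration help is authoritative), a compiled merge-lists binary might be registered along these lines:

    [partial-merge-tools]
    # hypothetical example: adjust the executable path and file patterns as needed
    merge-lists.executable = ~/.cargo/bin/merge-lists
    merge-lists.patterns = **.py
    merge-lists.args = --python-imports $local $base $other

The argument order matches the positional local/base/other arguments declared in the Args struct of the main program below.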
@@ -0,0 +1,300 b''
1 use clap::{ArgGroup, Parser};
2 use itertools::Itertools;
3 use regex::bytes::Regex;
4 use similar::ChangeTag;
5 use std::cmp::{max, min, Ordering};
6 use std::collections::HashSet;
7 use std::ffi::OsString;
8 use std::ops::Range;
9 use std::path::PathBuf;
10
11 fn find_unchanged_ranges(
12 old_bytes: &[u8],
13 new_bytes: &[u8],
14 ) -> Vec<(Range<usize>, Range<usize>)> {
15 let diff = similar::TextDiff::configure()
16 .algorithm(similar::Algorithm::Patience)
17 .diff_lines(old_bytes, new_bytes);
18 let mut new_unchanged_ranges = vec![];
19 let mut old_index = 0;
20 let mut new_index = 0;
21 for diff in diff.iter_all_changes() {
22 match diff.tag() {
23 ChangeTag::Equal => {
24 new_unchanged_ranges.push((
25 old_index..old_index + diff.value().len(),
26 new_index..new_index + diff.value().len(),
27 ));
28 old_index += diff.value().len();
29 new_index += diff.value().len();
30 }
31 ChangeTag::Delete => {
32 old_index += diff.value().len();
33 }
34 ChangeTag::Insert => {
35 new_index += diff.value().len();
36 }
37 }
38 }
39 new_unchanged_ranges
40 }
41
42 /// Returns a list of all the lines in the input (including trailing newlines),
43 /// but only if they all match the regex and they are sorted.
44 fn get_lines<'input>(
45 input: &'input [u8],
46 regex: &Regex,
47 ) -> Option<Vec<&'input [u8]>> {
48 let lines = input.split_inclusive(|x| *x == b'\n').collect_vec();
49 let mut previous_line = "".as_bytes();
50 for line in &lines {
51 if *line < previous_line {
52 return None;
53 }
54 if !regex.is_match(line) {
55 return None;
56 }
57 previous_line = line;
58 }
59 Some(lines)
60 }
61
62 fn resolve_conflict(
63 base_slice: &[u8],
64 local_slice: &[u8],
65 other_slice: &[u8],
66 regex: &Regex,
67 ) -> Option<Vec<u8>> {
68 let base_lines = get_lines(base_slice, regex)?;
69 let local_lines = get_lines(local_slice, regex)?;
70 let other_lines = get_lines(other_slice, regex)?;
71 let base_lines_set: HashSet<_> = base_lines.iter().copied().collect();
72 let local_lines_set: HashSet<_> = local_lines.iter().copied().collect();
73 let other_lines_set: HashSet<_> = other_lines.iter().copied().collect();
74 let mut result = local_lines_set;
75 for to_add in other_lines_set.difference(&base_lines_set) {
76 result.insert(to_add);
77 }
78 for to_remove in base_lines_set.difference(&other_lines_set) {
79 result.remove(to_remove);
80 }
81 Some(result.into_iter().sorted().collect_vec().concat())
82 }
83
84 fn resolve(
85 base_bytes: &[u8],
86 local_bytes: &[u8],
87 other_bytes: &[u8],
88 regex: &Regex,
89 ) -> (Vec<u8>, Vec<u8>, Vec<u8>) {
90 // Find unchanged ranges between the base and the two sides. We do that by
91 // initially considering the whole base unchanged. Then we compare each
92 // side with the base and intersect the unchanged ranges we find with
93 // what we had before.
94 let unchanged_ranges = vec![UnchangedRange {
95 base_range: 0..base_bytes.len(),
96 offsets: vec![],
97 }];
98 let unchanged_ranges = intersect_regions(
99 unchanged_ranges,
100 &find_unchanged_ranges(base_bytes, local_bytes),
101 );
102 let mut unchanged_ranges = intersect_regions(
103 unchanged_ranges,
104 &find_unchanged_ranges(base_bytes, other_bytes),
105 );
106 // Add an empty UnchangedRange at the end to make it easier to find change
107 // ranges. That way there's a changed range before each UnchangedRange.
108 unchanged_ranges.push(UnchangedRange {
109 base_range: base_bytes.len()..base_bytes.len(),
110 offsets: vec![
111 local_bytes.len().wrapping_sub(base_bytes.len()) as isize,
112 other_bytes.len().wrapping_sub(base_bytes.len()) as isize,
113 ],
114 });
115
116 let mut new_base_bytes: Vec<u8> = vec![];
117 let mut new_local_bytes: Vec<u8> = vec![];
118 let mut new_other_bytes: Vec<u8> = vec![];
119 let mut previous = UnchangedRange {
120 base_range: 0..0,
121 offsets: vec![0, 0],
122 };
123 for current in unchanged_ranges {
124 let base_slice =
125 &base_bytes[previous.base_range.end..current.base_range.start];
126 let local_slice = &local_bytes[previous.end(0)..current.start(0)];
127 let other_slice = &other_bytes[previous.end(1)..current.start(1)];
128 if let Some(resolution) =
129 resolve_conflict(base_slice, local_slice, other_slice, regex)
130 {
131 new_base_bytes.extend(&resolution);
132 new_local_bytes.extend(&resolution);
133 new_other_bytes.extend(&resolution);
134 } else {
135 new_base_bytes.extend(base_slice);
136 new_local_bytes.extend(local_slice);
137 new_other_bytes.extend(other_slice);
138 }
139 new_base_bytes.extend(&base_bytes[current.base_range.clone()]);
140 new_local_bytes.extend(&local_bytes[current.start(0)..current.end(0)]);
141 new_other_bytes.extend(&other_bytes[current.start(1)..current.end(1)]);
142 previous = current;
143 }
144
145 (new_base_bytes, new_local_bytes, new_other_bytes)
146 }
147
148 /// A tool that performs a 3-way merge, resolving conflicts in sorted lists and
149 /// leaving other conflicts unchanged. This is useful with Mercurial's support
150 /// for partial merge tools (configured in `[partial-merge-tools]`).
151 #[derive(Parser, Debug)]
152 #[clap(version, about, long_about = None)]
153 #[clap(group(ArgGroup::new("match").required(true).args(&["pattern", "python-imports"])))]
154 struct Args {
155 /// Path to the file's content in the "local" side
156 local: OsString,
157
158 /// Path to the file's content in the base
159 base: OsString,
160
161 /// Path to the file's content in the "other" side
162 other: OsString,
163
164 /// Regular expression to use
165 #[clap(long, short)]
166 pattern: Option<String>,
167
168 /// Use built-in regular expression for Python imports
169 #[clap(long)]
170 python_imports: bool,
171 }
172
173 fn get_regex(args: &Args) -> Regex {
174 let pattern = if args.python_imports {
175 r"import \w+(\.\w+)*( +#.*)?\n|from (\w+(\.\w+)* import \w+( as \w+)?(, \w+( as \w+)?)*( +#.*)?)"
176 } else if let Some(pattern) = &args.pattern {
177 pattern
178 } else {
179 ".*"
180 };
181 let pattern = format!(r"{}\r?\n?", pattern);
182 regex::bytes::Regex::new(&pattern).unwrap()
183 }
184
185 fn main() {
186 let args: Args = Args::parse();
187
188 let base_path = PathBuf::from(&args.base);
189 let local_path = PathBuf::from(&args.local);
190 let other_path = PathBuf::from(&args.other);
191
192 let base_bytes = std::fs::read(&base_path).unwrap();
193 let local_bytes = std::fs::read(&local_path).unwrap();
194 let other_bytes = std::fs::read(&other_path).unwrap();
195
196 let regex = get_regex(&args);
197 let (new_base_bytes, new_local_bytes, new_other_bytes) =
198 resolve(&base_bytes, &local_bytes, &other_bytes, &regex);
199
200 // Write out the result if anything changed
201 if new_base_bytes != base_bytes {
202 std::fs::write(&base_path, new_base_bytes).unwrap();
203 }
204 if new_local_bytes != local_bytes {
205 std::fs::write(&local_path, new_local_bytes).unwrap();
206 }
207 if new_other_bytes != other_bytes {
208 std::fs::write(&other_path, new_other_bytes).unwrap();
209 }
210 }
211
212 fn checked_add(base: usize, offset: isize) -> usize {
213 if offset < 0 {
214 base.checked_sub(offset.checked_abs().unwrap() as usize)
215 .unwrap()
216 } else {
217 base.checked_add(offset as usize).unwrap()
218 }
219 }
220
221 // The remainder of the file is copied from
222 // https://github.com/martinvonz/jj/blob/main/lib/src/diff.rs
223
224 #[derive(Clone, PartialEq, Eq, Debug)]
225 struct UnchangedRange {
226 base_range: Range<usize>,
227 offsets: Vec<isize>,
228 }
229
230 impl UnchangedRange {
231 fn start(&self, side: usize) -> usize {
232 checked_add(self.base_range.start, self.offsets[side])
233 }
234
235 fn end(&self, side: usize) -> usize {
236 checked_add(self.base_range.end, self.offsets[side])
237 }
238 }
239
240 impl PartialOrd for UnchangedRange {
241 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
242 Some(self.cmp(other))
243 }
244 }
245
246 impl Ord for UnchangedRange {
247 fn cmp(&self, other: &Self) -> Ordering {
248 self.base_range
249 .start
250 .cmp(&other.base_range.start)
251 .then_with(|| self.base_range.end.cmp(&other.base_range.end))
252 }
253 }
254
255 /// Takes the current regions and intersects them with the new unchanged ranges
256 /// from a 2-way diff. The result is a list of unchanged regions, each carrying
257 /// one more offset in its `offsets` vector.
258 fn intersect_regions(
259 current_ranges: Vec<UnchangedRange>,
260 new_unchanged_ranges: &[(Range<usize>, Range<usize>)],
261 ) -> Vec<UnchangedRange> {
262 let mut result = vec![];
263 let mut current_ranges_iter = current_ranges.into_iter().peekable();
264 for (new_base_range, other_range) in new_unchanged_ranges.iter() {
265 assert_eq!(new_base_range.len(), other_range.len());
266 while let Some(UnchangedRange {
267 base_range,
268 offsets,
269 }) = current_ranges_iter.peek()
270 {
271 // No need to look further if we're past the new range.
272 if base_range.start >= new_base_range.end {
273 break;
274 }
275 // Discard any current unchanged regions that don't match between
276 // the base and the new input.
277 if base_range.end <= new_base_range.start {
278 current_ranges_iter.next();
279 continue;
280 }
281 let new_start = max(base_range.start, new_base_range.start);
282 let new_end = min(base_range.end, new_base_range.end);
283 let mut new_offsets = offsets.clone();
284 new_offsets
285 .push(other_range.start.wrapping_sub(new_base_range.start)
286 as isize);
287 result.push(UnchangedRange {
288 base_range: new_start..new_end,
289 offsets: new_offsets,
290 });
291 if base_range.end >= new_base_range.end {
292 // Break without consuming the item; there may be other new
293 // ranges that overlap with it.
294 break;
295 }
296 current_ranges_iter.next();
297 }
298 }
299 result
300 }
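For a single conflicted region, resolve_conflict above boils down to set arithmetic on whole lines: start from the local side, add every line that other introduced relative to base, drop every line that base had but other no longer has, and emit the survivors in sorted order. A minimal standalone sketch of that rule, using a BTreeSet in place of the HashSet-plus-sort in the real code and reusing the inputs of the test_merge_lists_basic test further down (illustration only, not part of the changeset):

    use std::collections::BTreeSet;

    fn main() {
        // Toy inputs mirroring test_merge_lists_basic.
        let base: BTreeSet<&str> = ["import lib1\n", "import lib2\n"].into_iter().collect();
        let local: BTreeSet<&str> = ["import lib2\n", "import lib3\n"].into_iter().collect();
        let other: BTreeSet<&str> = ["import lib3\n", "import lib4\n"].into_iter().collect();

        // Start from local, add what other added, remove what other removed.
        let mut result = local;
        for added in other.difference(&base) {
            result.insert(*added);
        }
        for removed in base.difference(&other) {
            result.remove(*removed);
        }

        // BTreeSet iteration is already sorted, matching the sorted() + concat() above.
        let merged: String = result.into_iter().collect();
        assert_eq!(merged, "import lib3\nimport lib4\n");
    }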
@@ -0,0 +1,204 b''
1 use similar::DiffableStr;
2 use std::ffi::OsStr;
3 use tempdir::TempDir;
4
5 fn run_test(arg: &str, input: &str) -> String {
6 let mut cmd = assert_cmd::Command::cargo_bin("merge-lists").unwrap();
7 let temp_dir = TempDir::new("test").unwrap();
8 let base_path = temp_dir.path().join("base");
9 let local_path = temp_dir.path().join("local");
10 let other_path = temp_dir.path().join("other");
11
12 let rest = input.strip_prefix("\nbase:\n").unwrap();
13 let mut split = rest.split("\nlocal:\n");
14 std::fs::write(&base_path, split.next().unwrap()).unwrap();
15 let rest = split.next().unwrap();
16 let mut split = rest.split("\nother:\n");
17 std::fs::write(&local_path, split.next().unwrap()).unwrap();
18 std::fs::write(&other_path, split.next().unwrap()).unwrap();
19 cmd.args(&[
20 OsStr::new(arg),
21 local_path.as_os_str(),
22 base_path.as_os_str(),
23 other_path.as_os_str(),
24 ])
25 .assert()
26 .success();
27
28 let new_base_bytes = std::fs::read(&base_path).unwrap();
29 let new_local_bytes = std::fs::read(&local_path).unwrap();
30 let new_other_bytes = std::fs::read(&other_path).unwrap();
31 // No newline before "base:" because of https://github.com/mitsuhiko/insta/issues/117
32 format!(
33 "base:\n{}\nlocal:\n{}\nother:\n{}",
34 new_base_bytes.as_str().unwrap(),
35 new_local_bytes.as_str().unwrap(),
36 new_other_bytes.as_str().unwrap()
37 )
38 }
39
40 #[test]
41 fn test_merge_lists_basic() {
42 let output = run_test(
43 "--python-imports",
44 r"
45 base:
46 import lib1
47 import lib2
48
49 local:
50 import lib2
51 import lib3
52
53 other:
54 import lib3
55 import lib4
56 ",
57 );
58 insta::assert_snapshot!(output, @r###"
59 base:
60 import lib3
61 import lib4
62
63 local:
64 import lib3
65 import lib4
66
67 other:
68 import lib3
69 import lib4
70 "###);
71 }
72
73 #[test]
74 fn test_merge_lists_from() {
75 // Test some "from x import y" statements and some non-import conflicts
76 // (unresolvable)
77 let output = run_test(
78 "--python-imports",
79 r"
80 base:
81 from . import x
82
83 1+1
84
85 local:
86 from . import x
87 from a import b
88
89 2+2
90
91 other:
92 from a import c
93
94 3+3
95 ",
96 );
97 insta::assert_snapshot!(output, @r###"
98 base:
99 from a import b
100 from a import c
101
102 1+1
103
104 local:
105 from a import b
106 from a import c
107
108 2+2
109
110 other:
111 from a import b
112 from a import c
113
114 3+3
115 "###);
116 }
117
118 #[test]
119 fn test_merge_lists_not_sorted() {
120 // Test that nothing is done if the elements in the conflicting hunks are
121 // not sorted
122 let output = run_test(
123 "--python-imports",
124 r"
125 base:
126 import x
127
128 1+1
129
130 local:
131 import a
132 import x
133
134 2+2
135
136 other:
137 import z
138 import y
139
140 3+3
141 ",
142 );
143 insta::assert_snapshot!(output, @r###"
144 base:
145 import x
146
147 1+1
148
149 local:
150 import a
151 import x
152
153 2+2
154
155 other:
156 import z
157 import y
158
159 3+3
160 "###);
161 }
162
163 #[test]
164 fn test_custom_regex() {
165 // Test merging of all lines (by matching anything)
166 let output = run_test(
167 "--pattern=.*",
168 r"
169 base:
170 aardvark
171 baboon
172 camel
173
174 local:
175 aardvark
176 camel
177 eagle
178
179 other:
180 aardvark
181 camel
182 deer
183 ",
184 );
185 insta::assert_snapshot!(output, @r###"
186 base:
187 aardvark
188 camel
189 deer
190 eagle
191
192 local:
193 aardvark
194 camel
195 deer
196 eagle
197
198 other:
199 aardvark
200 camel
201 deer
202 eagle
203 "###);
204 }
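The --python-imports behaviour exercised by these tests hinges on the regular expression assembled in get_regex() in the main program above: a conflicted region is only auto-merged when every line in it matches the pattern and the lines are sorted, which is why the 1+1 / 2+2 / 3+3 hunks and the unsorted import block are passed through untouched. A small illustration of that pattern in isolation, using the same regex crate the tool already depends on (the snippet and its assertions are illustrative, not part of the changeset):

    use regex::bytes::Regex;

    fn main() {
        // Pattern string copied from get_regex() for --python-imports,
        // with the same \r?\n? suffix appended.
        let imports = r"import \w+(\.\w+)*( +#.*)?\n|from (\w+(\.\w+)* import \w+( as \w+)?(, \w+( as \w+)?)*( +#.*)?)";
        let regex = Regex::new(&format!(r"{}\r?\n?", imports)).unwrap();

        assert!(regex.is_match(b"import lib1\n"));
        assert!(regex.is_match(b"from a import b as c  # comment\n"));
        // A line like this fails the match, so get_lines() returns None and the
        // surrounding conflict is left for a regular merge tool to handle.
        assert!(!regex.is_match(b"1+1\n"));
    }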
@@ -0,0 +1,183 b''
1 #!/usr/bin/env python3
2 # compare various algorithm variants for a given case
3 #
4 # search-discovery-case REPO LOCAL_CASE REMOTE_CASE
5 #
6 # The description for the case input uses the same format as the output of
7 # search-discovery-case
8
9 import json
10 import os
11 import subprocess
12 import sys
13
14 this_script = os.path.abspath(sys.argv[0])
15 script_name = os.path.basename(this_script)
16 this_dir = os.path.dirname(this_script)
17 hg_dir = os.path.join(this_dir, '..', '..')
18 HG_REPO = os.path.normpath(hg_dir)
19 HG_BIN = os.path.join(HG_REPO, 'hg')
20
21
22 SUBSET_PATH = os.path.join(HG_REPO, 'contrib', 'perf-utils', 'subsetmaker.py')
23
24 CMD_BASE = (
25 HG_BIN,
26 'debugdiscovery',
27 '--template',
28 'json',
29 '--config',
30 'extensions.subset=%s' % SUBSET_PATH,
31 )
32
33 # --old
34 # --nonheads
35 #
36 # devel.discovery.exchange-heads=True
37 # devel.discovery.grow-sample=True
38 # devel.discovery.grow-sample.dynamic=True
39
40 VARIANTS = {
41 'tree-discovery': ('--old',),
42 'set-discovery-basic': (
43 '--config',
44 'devel.discovery.exchange-heads=no',
45 '--config',
46 'devel.discovery.grow-sample=no',
47 '--config',
48 'devel.discovery.grow-sample.dynamic=no',
49 '--config',
50 'devel.discovery.randomize=yes',
51 ),
52 'set-discovery-heads': (
53 '--config',
54 'devel.discovery.exchange-heads=yes',
55 '--config',
56 'devel.discovery.grow-sample=no',
57 '--config',
58 'devel.discovery.grow-sample.dynamic=no',
59 '--config',
60 'devel.discovery.randomize=yes',
61 ),
62 'set-discovery-grow-sample': (
63 '--config',
64 'devel.discovery.exchange-heads=yes',
65 '--config',
66 'devel.discovery.grow-sample=yes',
67 '--config',
68 'devel.discovery.grow-sample.dynamic=no',
69 '--config',
70 'devel.discovery.randomize=yes',
71 ),
72 'set-discovery-dynamic-sample': (
73 '--config',
74 'devel.discovery.exchange-heads=yes',
75 '--config',
76 'devel.discovery.grow-sample=yes',
77 '--config',
78 'devel.discovery.grow-sample.dynamic=yes',
79 '--config',
80 'devel.discovery.randomize=yes',
81 ),
82 'set-discovery-default': (
83 '--config',
84 'devel.discovery.randomize=yes',
85 ),
86 }
87
88 VARIANTS_KEYS = [
89 'tree-discovery',
90 'set-discovery-basic',
91 'set-discovery-heads',
92 'set-discovery-grow-sample',
93 'set-discovery-dynamic-sample',
94 'set-discovery-default',
95 ]
96
97 assert set(VARIANTS.keys()) == set(VARIANTS_KEYS)
98
99
100 def format_case(case):
101 return '-'.join(str(s) for s in case)
102
103
104 def to_revsets(case):
105 t = case[0]
106 if t == 'scratch':
107 return 'not scratch(all(), %d, "%d")' % (case[1], case[2])
108 elif t == 'randomantichain':
109 return '::randomantichain(all(), "%d")' % case[1]
110 elif t == 'rev':
111 return '::%d' % case[1]
112 else:
113 assert False
114
115
116 def compare(repo, local_case, remote_case):
117 case = (repo, local_case, remote_case)
118 for variant in VARIANTS_KEYS:
119 res = process(case, VARIANTS[variant])
120 revs = res["nb-revs"]
121 local_heads = res["nb-head-local"]
122 common_heads = res["nb-common-heads"]
123 roundtrips = res["total-roundtrips"]
124 queries = res["total-queries"]
125 if 'tree-discovery' in variant:
126 print(
127 repo,
128 format_case(local_case),
129 format_case(remote_case),
130 variant,
131 roundtrips,
132 queries,
133 revs,
134 local_heads,
135 common_heads,
136 )
137 else:
138 undecided_common = res["nb-ini_und-common"]
139 undecided_missing = res["nb-ini_und-missing"]
140 undecided = undecided_common + undecided_missing
141 print(
142 repo,
143 format_case(local_case),
144 format_case(remote_case),
145 variant,
146 roundtrips,
147 queries,
148 revs,
149 local_heads,
150 common_heads,
151 undecided,
152 undecided_common,
153 undecided_missing,
154 )
155 return 0
156
157
158 def process(case, variant):
159 (repo, left, right) = case
160 cmd = list(CMD_BASE)
161 cmd.append('-R')
162 cmd.append(repo)
163 cmd.append('--local-as-revs')
164 cmd.append(to_revsets(left))
165 cmd.append('--remote-as-revs')
166 cmd.append(to_revsets(right))
167 cmd.extend(variant)
168 s = subprocess.Popen(cmd, stdout=subprocess.PIPE)
169 out, err = s.communicate()
170 return json.loads(out)[0]
171
172
173 if __name__ == '__main__':
174 if len(sys.argv) != 4:
175 usage = f'USAGE: {script_name} REPO LOCAL_CASE REMOTE_CASE'
176 print(usage, file=sys.stderr)
177 sys.exit(128)
178 repo = sys.argv[1]
179 local_case = sys.argv[2].split('-')
180 local_case = (local_case[0],) + tuple(int(x) for x in local_case[1:])
181 remote_case = sys.argv[3].split('-')
182 remote_case = (remote_case[0],) + tuple(int(x) for x in remote_case[1:])
183 sys.exit(compare(repo, local_case, remote_case))
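For orientation, the LOCAL_CASE and REMOTE_CASE arguments are dash-separated strings that to_revsets() above expands into revsets evaluated with the subsetmaker.py extension loaded by CMD_BASE: a hypothetical rev-5000 becomes ::5000, randomantichain-7 becomes ::randomantichain(all(), "7"), and scratch-100-3 becomes not scratch(all(), 100, "3").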
NO CONTENT: new file 100644 (file too big, content truncated)
NO CONTENT: new file 100644 (file too big, content truncated)
NO CONTENT: new file 100644 (file too big, content truncated)
NO CONTENT: new file 100644 (file too big, content truncated)
NO CONTENT: new file 100644 (file too big, content truncated)
@@ -1,8 +1,3 b''
----
-name: Official Review
-about: Submit a series for review
----
-
 /assign_reviewer @mercurial.review
 
 Welcome to the Mercurial Merge Request creation process:
@@ -22,6 +22,8 b' syntax: glob'
 tests/artifacts/cache/big-file-churn.hg
 tests/.coverage*
 tests/.testtimes*
+# the file is written in the CWD when run-tests is run.
+.testtimes
 tests/.hypothesis
 tests/hypothesis-generated
 tests/annotated
@@ -33,6 +35,7 b' build'
 contrib/chg/chg
 contrib/hgsh/hgsh
 contrib/vagrant/.vagrant
+contrib/merge-lists/target/
 dist
 packages
 doc/common.txt
@@ -151,12 +151,9 b' testpy-%:'
 $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
 cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
 
-rust-tests: py_feature = $(shell $(PYTHON) -c \
-'import sys; print(["python27-bin", "python3-bin"][sys.version_info[0] >= 3])')
 rust-tests:
 cd $(HGROOT)/rust/hg-cpython \
-&& $(CARGO) test --quiet --all \
---no-default-features --features "$(py_feature) $(HG_RUST_FEATURES)"
+&& $(CARGO) test --quiet --all --features "$(HG_RUST_FEATURES)"
 
 check-code:
 hg manifest | xargs python contrib/check-code.py
@@ -238,16 +235,6 b' osx:'
 # Place a bogon .DS_Store file in the target dir so we can be
 # sure it doesn't get included in the final package.
 touch build/mercurial/.DS_Store
-# install zsh completions - this location appears to be
-# searched by default as of macOS Sierra.
-install -d build/mercurial/usr/local/share/zsh/site-functions/
-install -m 0644 contrib/zsh_completion build/mercurial/usr/local/share/zsh/site-functions/_hg
-# install bash completions - there doesn't appear to be a
-# place that's searched by default for bash, so we'll follow
-# the lead of Apple's git install and just put it in a
-# location of our own.
-install -d build/mercurial/usr/local/hg/contrib/
-install -m 0644 contrib/bash_completion build/mercurial/usr/local/hg/contrib/hg-completion.bash
 make -C contrib/chg \
 HGPATH=/usr/local/bin/hg \
 PYTHON=/usr/bin/python2.7 \
@@ -919,17 +919,12 b' def ensure_linux_dev_ami(c: AWSConnectio'
 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
 }
 
-requirements2_path = (
-pathlib.Path(__file__).parent.parent / 'linux-requirements-py2.txt'
-)
 requirements3_path = (
 pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.txt'
 )
 requirements35_path = (
 pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.5.txt'
 )
-with requirements2_path.open('r', encoding='utf-8') as fh:
-requirements2 = fh.read()
 with requirements3_path.open('r', encoding='utf-8') as fh:
 requirements3 = fh.read()
 with requirements35_path.open('r', encoding='utf-8') as fh:
@@ -941,7 +936,6 b' def ensure_linux_dev_ami(c: AWSConnectio'
 {
 'instance_config': config,
 'bootstrap_script': BOOTSTRAP_DEBIAN,
-'requirements_py2': requirements2,
 'requirements_py3': requirements3,
 'requirements_py35': requirements35,
 }
@@ -977,10 +971,6 b' def ensure_linux_dev_ami(c: AWSConnectio'
 fh.write(BOOTSTRAP_DEBIAN)
 fh.chmod(0o0700)
 
-with sftp.open('%s/requirements-py2.txt' % home, 'wb') as fh:
-fh.write(requirements2)
-fh.chmod(0o0700)
-
 with sftp.open('%s/requirements-py3.txt' % home, 'wb') as fh:
 fh.write(requirements3)
 fh.chmod(0o0700)
@@ -65,7 +65,6 b' def bootstrap_windows_dev(hga: HGAutomat'
 def build_inno(
 hga: HGAutomation,
 aws_region,
-python_version,
 arch,
 revision,
 version,
@@ -80,21 +79,18 b' build_inno('
 
 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
 
-for py_version in python_version:
-for a in arch:
-windows.build_inno_installer(
-instance.winrm_client,
-py_version,
-a,
-DIST_PATH,
-version=version,
-)
+for a in arch:
+windows.build_inno_installer(
+instance.winrm_client,
+a,
+DIST_PATH,
+version=version,
+)
 
 
 def build_wix(
 hga: HGAutomation,
 aws_region,
-python_version,
 arch,
 revision,
 version,
@@ -109,15 +105,13 b' build_wix('
 
 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
 
-for py_version in python_version:
-for a in arch:
-windows.build_wix_installer(
-instance.winrm_client,
-py_version,
-a,
-DIST_PATH,
-version=version,
-)
+for a in arch:
+windows.build_wix_installer(
+instance.winrm_client,
+a,
+DIST_PATH,
+version=version,
+)
 
 
 def build_windows_wheel(
@@ -158,7 +152,7 b' def build_all_windows_packages('
 
 windows.synchronize_hg(SOURCE_ROOT, revision, instance)
 
-for py_version in ("2.7", "3.7", "3.8", "3.9", "3.10"):
+for py_version in ("3.7", "3.8", "3.9", "3.10"):
 for arch in ("x86", "x64"):
 windows.purge_hg(winrm_client)
 windows.build_wheel(
@@ -168,15 +162,14 b' def build_all_windows_packages('
 dest_path=DIST_PATH,
 )
 
-for py_version in (2, 3):
-for arch in ('x86', 'x64'):
-windows.purge_hg(winrm_client)
-windows.build_inno_installer(
-winrm_client, py_version, arch, DIST_PATH, version=version
-)
-windows.build_wix_installer(
-winrm_client, py_version, arch, DIST_PATH, version=version
-)
+for arch in ('x86', 'x64'):
+windows.purge_hg(winrm_client)
+windows.build_inno_installer(
+winrm_client, arch, DIST_PATH, version=version
+)
+windows.build_wix_installer(
+winrm_client, arch, DIST_PATH, version=version
+)
 
 
 def terminate_ec2_instances(hga: HGAutomation, aws_region):
@@ -340,14 +333,6 b' def get_parser():'
 help='Build Inno Setup installer(s)',
 )
 sp.add_argument(
-'--python-version',
-help='Which version of Python to target',
-choices={2, 3},
-type=int,
-nargs='*',
-default=[3],
-)
-sp.add_argument(
 '--arch',
 help='Architecture to build for',
 choices={'x86', 'x64'},
@@ -377,7 +362,7 b' def get_parser():'
 sp.add_argument(
 '--python-version',
 help='Python version to build for',
-choices={'2.7', '3.7', '3.8', '3.9', '3.10'},
+choices={'3.7', '3.8', '3.9', '3.10'},
 nargs='*',
 default=['3.8'],
 )
@@ -402,14 +387,6 b' def get_parser():'
 
 sp = subparsers.add_parser('build-wix', help='Build WiX installer(s)')
 sp.add_argument(
-'--python-version',
-help='Which version of Python to target',
-choices={2, 3},
-type=int,
-nargs='*',
-default=[3],
-)
-sp.add_argument(
 '--arch',
 help='Architecture to build for',
 choices={'x86', 'x64'},
@@ -469,9 +446,7 b' def get_parser():'
 '--python-version',
 help='Python version to use',
 choices={
-'system2',
 'system3',
-'2.7',
 '3.5',
 '3.6',
 '3.7',
@@ -480,7 +455,7 b' def get_parser():'
 'pypy3.5',
 'pypy3.6',
 },
-default='system2',
+default='system3',
 )
 sp.add_argument(
 'test_flags',
@@ -501,8 +476,8 b' def get_parser():'
 sp.add_argument(
 '--python-version',
 help='Python version to use',
-choices={'2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10'},
-default='2.7',
+choices={'3.5', '3.6', '3.7', '3.8', '3.9', '3.10'},
+default='3.9',
 )
 sp.add_argument(
 '--arch',
@@ -25,7 +25,6 b' DISTROS = {'
 }
 
 INSTALL_PYTHONS = r'''
-PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0"
 PYENV3_VERSIONS="3.5.10 3.6.13 3.7.10 3.8.10 3.9.5 pypy3.5-7.0.0 pypy3.6-7.3.3 pypy3.7-7.3.3"
 
 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
@@ -46,13 +45,6 b' VIRTUALENV_TARBALL=virtualenv-16.7.5.tar'
 wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL}
 echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check -
 
-for v in ${PYENV2_VERSIONS}; do
-pyenv install -v ${v}
-${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
-${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL}
-${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt
-done
-
 for v in ${PYENV3_VERSIONS}; do
 pyenv install -v ${v}
 ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py
@@ -72,7 +64,7 b' for v in ${PYENV3_VERSIONS}; do'
 ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/${REQUIREMENTS}
 done
 
-pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
+pyenv global ${PYENV3_VERSIONS} system
 '''.lstrip().replace(
 '\r\n', '\n'
 )
@@ -274,17 +266,8 b' PACKAGES="\\'
 netbase \
 ntfs-3g \
 nvme-cli \
-pyflakes \
 pyflakes3 \
-pylint \
 pylint3 \
-python-all-dev \
-python-dev \
-python-docutils \
-python-fuzzywuzzy \
-python-pygments \
-python-subversion \
-python-vcr \
 python3-boto3 \
 python3-dev \
 python3-docutils \
@@ -532,7 +515,7 b' def synchronize_hg('
 hg_bin = source_path / 'hg'
 
 res = subprocess.run(
-['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
+['python3', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
 cwd=str(source_path),
 env=env,
 check=True,
@@ -542,7 +525,7 b' def synchronize_hg('
 full_revision = res.stdout.decode('ascii')
 
 args = [
-'python2.7',
+'python3',
 str(hg_bin),
 '--config',
 'ui.ssh=ssh -F %s' % ssh_config,
@@ -595,9 +578,7 b' def run_tests(ssh_client, python_version'
 
 print('running tests')
 
-if python_version == 'system2':
-python = '/usr/bin/python2'
-elif python_version == 'system3':
+if python_version == 'system3':
 python = '/usr/bin/python3'
 elif python_version.startswith('pypy'):
 python = '/hgdev/pyenv/shims/%s' % python_version
@@ -19,30 +19,6 b' from .pypi import upload as pypi_upload'
 from .winrm import run_powershell
 
 
-# PowerShell commands to activate a Visual Studio 2008 environment.
-# This is essentially a port of vcvarsall.bat to PowerShell.
-ACTIVATE_VC9_AMD64 = r'''
-Write-Output "activating Visual Studio 2008 environment for AMD64"
-$root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
-$Env:VCINSTALLDIR = "${root}\VC\"
-$Env:WindowsSdkDir = "${root}\WinSDK\"
-$Env:PATH = "${root}\VC\Bin\amd64;${root}\WinSDK\Bin\x64;${root}\WinSDK\Bin;$Env:PATH"
-$Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:PATH"
-$Env:LIB = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIB"
-$Env:LIBPATH = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIBPATH"
-'''.lstrip()
-
-ACTIVATE_VC9_X86 = r'''
-Write-Output "activating Visual Studio 2008 environment for x86"
-$root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0"
-$Env:VCINSTALLDIR = "${root}\VC\"
-$Env:WindowsSdkDir = "${root}\WinSDK\"
-$Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH"
-$Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE"
-$Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB"
-$Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib;$Env:LIBPATH"
-'''.lstrip()
-
 HG_PURGE = r'''
 $Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH"
 Set-Location C:\hgdev\src
@@ -78,14 +54,6 b' if ($LASTEXITCODE -ne 0) {{'
 }}
 '''
 
-BUILD_INNO_PYTHON2 = r'''
-Set-Location C:\hgdev\src
-$python = "C:\hgdev\python27-{arch}\python.exe"
-C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --python $python {extra_args}
-if ($LASTEXITCODE -ne 0) {{
-throw "process exited non-0: $LASTEXITCODE"
-}}
-'''.lstrip()
 
 BUILD_WHEEL = r'''
 Set-Location C:\hgdev\src
@@ -105,14 +73,6 b' if ($LASTEXITCODE -ne 0) {{'
 }}
 '''
 
-BUILD_WIX_PYTHON2 = r'''
-Set-Location C:\hgdev\src
-$python = "C:\hgdev\python27-{arch}\python.exe"
-C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --python $python {extra_args}
-if ($LASTEXITCODE -ne 0) {{
-throw "process exited non-0: $LASTEXITCODE"
-}}
-'''
 
 RUN_TESTS = r'''
 C:\hgdev\MinGW\msys\1.0\bin\sh.exe --login -c "cd /c/hgdev/src/tests && /c/hgdev/{python_path}/python.exe run-tests.py {test_flags}"
@@ -121,8 +81,7 b' if ($LASTEXITCODE -ne 0) {{'
 }}
 '''
 
-WHEEL_FILENAME_PYTHON27_X86 = 'mercurial-{version}-cp27-cp27m-win32.whl'
-WHEEL_FILENAME_PYTHON27_X64 = 'mercurial-{version}-cp27-cp27m-win_amd64.whl'
+
 WHEEL_FILENAME_PYTHON37_X86 = 'mercurial-{version}-cp37-cp37m-win32.whl'
 WHEEL_FILENAME_PYTHON37_X64 = 'mercurial-{version}-cp37-cp37m-win_amd64.whl'
 WHEEL_FILENAME_PYTHON38_X86 = 'mercurial-{version}-cp38-cp38-win32.whl'
@@ -132,13 +91,9 b" WHEEL_FILENAME_PYTHON39_X64 = 'mercurial"
 WHEEL_FILENAME_PYTHON310_X86 = 'mercurial-{version}-cp310-cp310-win32.whl'
 WHEEL_FILENAME_PYTHON310_X64 = 'mercurial-{version}-cp310-cp310-win_amd64.whl'
 
-EXE_FILENAME_PYTHON2_X86 = 'Mercurial-{version}-x86-python2.exe'
-EXE_FILENAME_PYTHON2_X64 = 'Mercurial-{version}-x64-python2.exe'
 EXE_FILENAME_PYTHON3_X86 = 'Mercurial-{version}-x86.exe'
 EXE_FILENAME_PYTHON3_X64 = 'Mercurial-{version}-x64.exe'
 
-MSI_FILENAME_PYTHON2_X86 = 'mercurial-{version}-x86-python2.msi'
-MSI_FILENAME_PYTHON2_X64 = 'mercurial-{version}-x64-python2.msi'
 MSI_FILENAME_PYTHON3_X86 = 'mercurial-{version}-x86.msi'
 MSI_FILENAME_PYTHON3_X64 = 'mercurial-{version}-x64.msi'
 
@@ -147,14 +102,6 b" MERCURIAL_SCM_BASE_URL = 'https://mercur"
 X86_USER_AGENT_PATTERN = '.*Windows.*'
 X64_USER_AGENT_PATTERN = '.*Windows.*(WOW|x)64.*'
 
-EXE_PYTHON2_X86_DESCRIPTION = (
-'Mercurial {version} Inno Setup installer - x86 Windows (Python 2) '
-'- does not require admin rights'
-)
-EXE_PYTHON2_X64_DESCRIPTION = (
-'Mercurial {version} Inno Setup installer - x64 Windows (Python 2) '
-'- does not require admin rights'
-)
 # TODO remove Python version once Python 2 is dropped.
 EXE_PYTHON3_X86_DESCRIPTION = (
 'Mercurial {version} Inno Setup installer - x86 Windows (Python 3) '
@@ -164,14 +111,6 b' EXE_PYTHON3_X64_DESCRIPTION = ('
 'Mercurial {version} Inno Setup installer - x64 Windows (Python 3) '
 '- does not require admin rights'
 )
-MSI_PYTHON2_X86_DESCRIPTION = (
-'Mercurial {version} MSI installer - x86 Windows (Python 2) '
-'- requires admin rights'
-)
-MSI_PYTHON2_X64_DESCRIPTION = (
-'Mercurial {version} MSI installer - x64 Windows (Python 2) '
-'- requires admin rights'
-)
 MSI_PYTHON3_X86_DESCRIPTION = (
 'Mercurial {version} MSI installer - x86 Windows (Python 3) '
 '- requires admin rights'
@@ -182,15 +121,6 b' MSI_PYTHON3_X64_DESCRIPTION = ('
 )
 
 
-def get_vc_prefix(arch):
-if arch == 'x86':
-return ACTIVATE_VC9_X86
-elif arch == 'x64':
-return ACTIVATE_VC9_AMD64
-else:
-raise ValueError('illegal arch: %s; must be x86 or x64' % arch)
-
-
 def fix_authorized_keys_permissions(winrm_client, path):
 commands = [
 '$ErrorActionPreference = "Stop"',
@@ -261,7 +191,7 b' def synchronize_hg(hg_repo: pathlib.Path'
261 hg_bin = hg_repo / 'hg'
191 hg_bin = hg_repo / 'hg'
262
192
263 res = subprocess.run(
193 res = subprocess.run(
264 ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
194 ['python3', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
265 cwd=str(hg_repo),
195 cwd=str(hg_repo),
266 env=env,
196 env=env,
267 check=True,
197 check=True,
@@ -271,7 +201,7 b' def synchronize_hg(hg_repo: pathlib.Path'
271 full_revision = res.stdout.decode('ascii')
201 full_revision = res.stdout.decode('ascii')
272
202
273 args = [
203 args = [
274 'python2.7',
204 'python3',
275 hg_bin,
205 hg_bin,
276 '--config',
206 '--config',
277 'ui.ssh=ssh -F %s' % ssh_config,
207 'ui.ssh=ssh -F %s' % ssh_config,
@@ -334,7 +264,6 b' def copy_latest_dist(winrm_client, patte'
334
264
335 def build_inno_installer(
265 def build_inno_installer(
336 winrm_client,
266 winrm_client,
337 python_version: int,
338 arch: str,
267 arch: str,
339 dest_path: pathlib.Path,
268 dest_path: pathlib.Path,
340 version=None,
269 version=None,
@@ -344,37 +273,23 b' def build_inno_installer('
344 Using a WinRM client, remote commands are executed to build
273 Using a WinRM client, remote commands are executed to build
345 a Mercurial Inno Setup installer.
274 a Mercurial Inno Setup installer.
346 """
275 """
347 print(
276 print('building Inno Setup installer for %s' % arch)
348 'building Inno Setup installer for Python %d %s'
349 % (python_version, arch)
350 )
351
277
352 if python_version == 3:
278 # TODO fix this limitation in packaging code
353 # TODO fix this limitation in packaging code
279 if not version:
354 if not version:
280 raise Exception("version string is required when building for Python 3")
355 raise Exception(
356 "version string is required when building for Python 3"
357 )
358
281
359 if arch == "x86":
282 if arch == "x86":
360 target_triple = "i686-pc-windows-msvc"
283 target_triple = "i686-pc-windows-msvc"
361 elif arch == "x64":
284 elif arch == "x64":
362 target_triple = "x86_64-pc-windows-msvc"
285 target_triple = "x86_64-pc-windows-msvc"
363 else:
286 else:
364 raise Exception("unhandled arch: %s" % arch)
287 raise Exception("unhandled arch: %s" % arch)
365
288
366 ps = BUILD_INNO_PYTHON3.format(
289 ps = BUILD_INNO_PYTHON3.format(
367 pyoxidizer_target=target_triple,
290 pyoxidizer_target=target_triple,
368 version=version,
291 version=version,
369 )
292 )
370 else:
371 extra_args = []
372 if version:
373 extra_args.extend(['--version', version])
374
375 ps = get_vc_prefix(arch) + BUILD_INNO_PYTHON2.format(
376 arch=arch, extra_args=' '.join(extra_args)
377 )
378
293
379 run_powershell(winrm_client, ps)
294 run_powershell(winrm_client, ps)
380 copy_latest_dist(winrm_client, '*.exe', dest_path)
295 copy_latest_dist(winrm_client, '*.exe', dest_path)
@@ -394,17 +309,12 b' def build_wheel('
394 python_version=python_version.replace(".", ""), arch=arch
309 python_version=python_version.replace(".", ""), arch=arch
395 )
310 )
396
311
397 # Python 2.7 requires an activated environment.
398 if python_version == "2.7":
399 ps = get_vc_prefix(arch) + ps
400
401 run_powershell(winrm_client, ps)
312 run_powershell(winrm_client, ps)
402 copy_latest_dist(winrm_client, '*.whl', dest_path)
313 copy_latest_dist(winrm_client, '*.whl', dest_path)
403
314
404
315
405 def build_wix_installer(
316 def build_wix_installer(
406 winrm_client,
317 winrm_client,
407 python_version: int,
408 arch: str,
318 arch: str,
409 dest_path: pathlib.Path,
319 dest_path: pathlib.Path,
410 version=None,
320 version=None,
@@ -413,34 +323,23 b' def build_wix_installer('
413
323
414 Using a WinRM client, remote commands are executed to build a WiX installer.
324 Using a WinRM client, remote commands are executed to build a WiX installer.
415 """
325 """
416 print('Building WiX installer for Python %d %s' % (python_version, arch))
326 print('Building WiX installer for %s' % arch)
417
327
418 if python_version == 3:
328 # TODO fix this limitation in packaging code
419 # TODO fix this limitation in packaging code
329 if not version:
420 if not version:
330 raise Exception("version string is required when building for Python 3")
421 raise Exception(
422 "version string is required when building for Python 3"
423 )
424
331
425 if arch == "x86":
332 if arch == "x86":
426 target_triple = "i686-pc-windows-msvc"
333 target_triple = "i686-pc-windows-msvc"
427 elif arch == "x64":
334 elif arch == "x64":
428 target_triple = "x86_64-pc-windows-msvc"
335 target_triple = "x86_64-pc-windows-msvc"
429 else:
336 else:
430 raise Exception("unhandled arch: %s" % arch)
337 raise Exception("unhandled arch: %s" % arch)
431
338
432 ps = BUILD_WIX_PYTHON3.format(
339 ps = BUILD_WIX_PYTHON3.format(
433 pyoxidizer_target=target_triple,
340 pyoxidizer_target=target_triple,
434 version=version,
341 version=version,
435 )
342 )
436 else:
437 extra_args = []
438 if version:
439 extra_args.extend(['--version', version])
440
441 ps = get_vc_prefix(arch) + BUILD_WIX_PYTHON2.format(
442 arch=arch, extra_args=' '.join(extra_args)
443 )
444
343
445 run_powershell(winrm_client, ps)
344 run_powershell(winrm_client, ps)
446 copy_latest_dist(winrm_client, '*.msi', dest_path)
345 copy_latest_dist(winrm_client, '*.msi', dest_path)
@@ -474,8 +373,6 b' def run_tests(winrm_client, python_versi'
474
373
475 def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
374 def resolve_wheel_artifacts(dist_path: pathlib.Path, version: str):
476 return (
375 return (
477 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
478 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
479 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
376 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
480 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
377 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
481 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
378 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
@@ -489,8 +386,6 b' def resolve_wheel_artifacts(dist_path: p'
489
386
490 def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
387 def resolve_all_artifacts(dist_path: pathlib.Path, version: str):
491 return (
388 return (
492 dist_path / WHEEL_FILENAME_PYTHON27_X86.format(version=version),
493 dist_path / WHEEL_FILENAME_PYTHON27_X64.format(version=version),
494 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
389 dist_path / WHEEL_FILENAME_PYTHON37_X86.format(version=version),
495 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
390 dist_path / WHEEL_FILENAME_PYTHON37_X64.format(version=version),
496 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
391 dist_path / WHEEL_FILENAME_PYTHON38_X86.format(version=version),
@@ -499,24 +394,16 b' def resolve_all_artifacts(dist_path: pat'
499 dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
394 dist_path / WHEEL_FILENAME_PYTHON39_X64.format(version=version),
500 dist_path / WHEEL_FILENAME_PYTHON310_X86.format(version=version),
395 dist_path / WHEEL_FILENAME_PYTHON310_X86.format(version=version),
501 dist_path / WHEEL_FILENAME_PYTHON310_X64.format(version=version),
396 dist_path / WHEEL_FILENAME_PYTHON310_X64.format(version=version),
502 dist_path / EXE_FILENAME_PYTHON2_X86.format(version=version),
503 dist_path / EXE_FILENAME_PYTHON2_X64.format(version=version),
504 dist_path / EXE_FILENAME_PYTHON3_X86.format(version=version),
397 dist_path / EXE_FILENAME_PYTHON3_X86.format(version=version),
505 dist_path / EXE_FILENAME_PYTHON3_X64.format(version=version),
398 dist_path / EXE_FILENAME_PYTHON3_X64.format(version=version),
506 dist_path / MSI_FILENAME_PYTHON2_X86.format(version=version),
507 dist_path / MSI_FILENAME_PYTHON2_X64.format(version=version),
508 dist_path / MSI_FILENAME_PYTHON3_X86.format(version=version),
399 dist_path / MSI_FILENAME_PYTHON3_X86.format(version=version),
509 dist_path / MSI_FILENAME_PYTHON3_X64.format(version=version),
400 dist_path / MSI_FILENAME_PYTHON3_X64.format(version=version),
510 )
401 )
511
402
512
403
513 def generate_latest_dat(version: str):
404 def generate_latest_dat(version: str):
514 python2_x86_exe_filename = EXE_FILENAME_PYTHON2_X86.format(version=version)
515 python2_x64_exe_filename = EXE_FILENAME_PYTHON2_X64.format(version=version)
516 python3_x86_exe_filename = EXE_FILENAME_PYTHON3_X86.format(version=version)
405 python3_x86_exe_filename = EXE_FILENAME_PYTHON3_X86.format(version=version)
517 python3_x64_exe_filename = EXE_FILENAME_PYTHON3_X64.format(version=version)
406 python3_x64_exe_filename = EXE_FILENAME_PYTHON3_X64.format(version=version)
518 python2_x86_msi_filename = MSI_FILENAME_PYTHON2_X86.format(version=version)
519 python2_x64_msi_filename = MSI_FILENAME_PYTHON2_X64.format(version=version)
520 python3_x86_msi_filename = MSI_FILENAME_PYTHON3_X86.format(version=version)
407 python3_x86_msi_filename = MSI_FILENAME_PYTHON3_X86.format(version=version)
521 python3_x64_msi_filename = MSI_FILENAME_PYTHON3_X64.format(version=version)
408 python3_x64_msi_filename = MSI_FILENAME_PYTHON3_X64.format(version=version)
522
409
@@ -536,20 +423,6 b' def generate_latest_dat(version: str):'
536 EXE_PYTHON3_X64_DESCRIPTION.format(version=version),
423 EXE_PYTHON3_X64_DESCRIPTION.format(version=version),
537 ),
424 ),
538 (
425 (
539 '9',
540 version,
541 X86_USER_AGENT_PATTERN,
542 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_exe_filename),
543 EXE_PYTHON2_X86_DESCRIPTION.format(version=version),
544 ),
545 (
546 '9',
547 version,
548 X64_USER_AGENT_PATTERN,
549 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_exe_filename),
550 EXE_PYTHON2_X64_DESCRIPTION.format(version=version),
551 ),
552 (
553 '10',
426 '10',
554 version,
427 version,
555 X86_USER_AGENT_PATTERN,
428 X86_USER_AGENT_PATTERN,
@@ -563,20 +436,6 b' def generate_latest_dat(version: str):'
563 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_msi_filename),
436 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python3_x64_msi_filename),
564 MSI_PYTHON3_X64_DESCRIPTION.format(version=version),
437 MSI_PYTHON3_X64_DESCRIPTION.format(version=version),
565 ),
438 ),
566 (
567 '9',
568 version,
569 X86_USER_AGENT_PATTERN,
570 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x86_msi_filename),
571 MSI_PYTHON2_X86_DESCRIPTION.format(version=version),
572 ),
573 (
574 '9',
575 version,
576 X64_USER_AGENT_PATTERN,
577 '%s/%s' % (MERCURIAL_SCM_BASE_URL, python2_x64_msi_filename),
578 MSI_PYTHON2_X64_DESCRIPTION.format(version=version),
579 ),
580 )
439 )
581
440
582 lines = ['\t'.join(e) for e in entries]
441 lines = ['\t'.join(e) for e in entries]
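With the Python 2 rows gone, every record written to latest.dat describes a Python 3 artifact. As a rough editorial sketch of the tab-separated format this function emits — the version and base URL below are placeholders, not values taken from this changeset — one x64 Inno Setup row could be assembled like this:

    # Editorial sketch only; base_url and version are placeholders.
    version = '6.2'
    base_url = 'https://example.invalid/windows'          # stands in for MERCURIAL_SCM_BASE_URL
    exe_name = 'Mercurial-{version}-x64.exe'.format(version=version)  # EXE_FILENAME_PYTHON3_X64
    row = (
        '10',                                              # record kind used for the Python 3 installers
        version,
        '.*Windows.*(WOW|x)64.*',                          # X64_USER_AGENT_PATTERN
        '%s/%s' % (base_url, exe_name),
        'Mercurial %s Inno Setup installer - x64 Windows (Python 3) '
        '- does not require admin rights' % version,
    )
    print('\t'.join(row))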
@@ -1,6 +1,5 b''
1 # Randomized torture test generation for bdiff
1 # Randomized torture test generation for bdiff
2
2
3 from __future__ import absolute_import, print_function
4 import random
3 import random
5 import sys
4 import sys
6
5
@@ -31,7 +31,6 b' Invocation example:'
31 $ asv --config contrib/asv.conf.json preview
31 $ asv --config contrib/asv.conf.json preview
32 '''
32 '''
33
33
34 from __future__ import absolute_import
35
34
36 import functools
35 import functools
37 import os
36 import os
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from . import perfbench
9 from . import perfbench
11
10
@@ -10,7 +10,6 b''
10 Each revset benchmark is parameterized with variants (first, last, sort, ...)
10 Each revset benchmark is parameterized with variants (first, last, sort, ...)
11 '''
11 '''
12
12
13 from __future__ import absolute_import
14
13
15 import os
14 import os
16 import string
15 import string
@@ -7,7 +7,6 b''
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import, print_function
11
10
12 import argparse
11 import argparse
13 import contextlib
12 import contextlib
@@ -1,4 +1,3 b''
1 from __future__ import absolute_import
2 import __builtin__
1 import __builtin__
3 import os
2 import os
4 from mercurial import util
3 from mercurial import util
@@ -34,7 +34,6 b" Typically you'll want to place the path "
34 HGCATAPULTSERVERPIPE environment variable, which both run-tests and hg
34 HGCATAPULTSERVERPIPE environment variable, which both run-tests and hg
35 understand. To trace *only* run-tests, use HGTESTCATAPULTSERVERPIPE instead.
35 understand. To trace *only* run-tests, use HGTESTCATAPULTSERVERPIPE instead.
36 """
36 """
37 from __future__ import absolute_import, print_function
38
37
39 import argparse
38 import argparse
40 import json
39 import json
@@ -19,7 +19,6 b' when a rule triggers wrong, do one of th'
19 * ONLY use no--check-code for skipping entire files from external sources
19 * ONLY use no--check-code for skipping entire files from external sources
20 """
20 """
21
21
22 from __future__ import absolute_import, print_function
23 import glob
22 import glob
24 import keyword
23 import keyword
25 import optparse
24 import optparse
@@ -344,16 +343,6 b' commonpypats = ['
344 "linebreak after :",
343 "linebreak after :",
345 ),
344 ),
346 (
345 (
347 r'class\s[^( \n]+:',
348 "old-style class, use class foo(object)",
349 r'#.*old-style',
350 ),
351 (
352 r'class\s[^( \n]+\(\):',
353 "class foo() creates old style object, use class foo(object)",
354 r'#.*old-style',
355 ),
356 (
357 r'\b(%s)\('
346 r'\b(%s)\('
358 % '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')),
347 % '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')),
359 "Python keyword is not a function",
348 "Python keyword is not a function",
@@ -431,26 +420,6 b' commonpypats = ['
431 "module-level @cachefunc is risky, please avoid",
420 "module-level @cachefunc is risky, please avoid",
432 ),
421 ),
433 (
422 (
434 r'^import Queue',
435 "don't use Queue, use pycompat.queue.Queue + "
436 "pycompat.queue.Empty",
437 ),
438 (
439 r'^import cStringIO',
440 "don't use cStringIO.StringIO, use util.stringio",
441 ),
442 (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
443 (
444 r'^import SocketServer',
445 "don't use SockerServer, use util.socketserver",
446 ),
447 (r'^import urlparse', "don't use urlparse, use util.urlreq"),
448 (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"),
449 (r'^import cPickle', "don't use cPickle, use util.pickle"),
450 (r'^import pickle', "don't use pickle, use util.pickle"),
451 (r'^import httplib', "don't use httplib, use util.httplib"),
452 (r'^import BaseHTTPServer', "use util.httpserver instead"),
453 (
454 r'^(from|import) mercurial\.(cext|pure|cffi)',
423 r'^(from|import) mercurial\.(cext|pure|cffi)',
455 "use mercurial.policy.importmod instead",
424 "use mercurial.policy.importmod instead",
456 ),
425 ),
@@ -789,7 +758,7 b' def _preparepats():'
789 preparefilters(filters)
758 preparefilters(filters)
790
759
791
760
792 class norepeatlogger(object):
761 class norepeatlogger:
793 def __init__(self):
762 def __init__(self):
794 self._lastseen = None
763 self._lastseen = None
795
764
@@ -15,7 +15,6 b''
15 #
15 #
16 # See also: https://mercurial-scm.org/wiki/ContributingChanges
16 # See also: https://mercurial-scm.org/wiki/ContributingChanges
17
17
18 from __future__ import absolute_import, print_function
19
18
20 import os
19 import os
21 import re
20 import re
@@ -7,7 +7,6 b''
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import, print_function
11 import re
10 import re
12 import sys
11 import sys
13
12
@@ -7,7 +7,6 b''
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import, print_function
11
10
12 import ast
11 import ast
13 import importlib
12 import importlib
@@ -17,31 +16,6 b' import traceback'
17 import warnings
16 import warnings
18
17
19
18
20 def check_compat_py2(f):
21 """Check Python 3 compatibility for a file with Python 2"""
22 with open(f, 'rb') as fh:
23 content = fh.read()
24 root = ast.parse(content)
25
26 # Ignore empty files.
27 if not root.body:
28 return
29
30 futures = set()
31 haveprint = False
32 for node in ast.walk(root):
33 if isinstance(node, ast.ImportFrom):
34 if node.module == '__future__':
35 futures |= {n.name for n in node.names}
36 elif isinstance(node, ast.Print):
37 haveprint = True
38
39 if 'absolute_import' not in futures:
40 print('%s not using absolute_import' % f)
41 if haveprint and 'print_function' not in futures:
42 print('%s requires print_function' % f)
43
44
45 def check_compat_py3(f):
19 def check_compat_py3(f):
46 """Check Python 3 compatibility of a file with Python 3."""
20 """Check Python 3 compatibility of a file with Python 3."""
47 with open(f, 'rb') as fh:
21 with open(f, 'rb') as fh:
@@ -94,23 +68,19 b' def check_compat_py3(f):'
94
68
95
69
96 if __name__ == '__main__':
70 if __name__ == '__main__':
97 if sys.version_info[0] == 2:
71 # check_compat_py3 will import every filename we specify as long as it
98 fn = check_compat_py2
72 # starts with one of a few prefixes. It does this by converting
99 else:
73 # specified filenames like 'mercurial/foo.py' to 'mercurial.foo' and
100 # check_compat_py3 will import every filename we specify as long as it
74 # importing that. When running standalone (not as part of a test), this
101 # starts with one of a few prefixes. It does this by converting
75 # means we actually import the installed versions, not the files we just
102 # specified filenames like 'mercurial/foo.py' to 'mercurial.foo' and
76 # specified. When running as test-check-py3-compat.t, we technically
103 # importing that. When running standalone (not as part of a test), this
77 # would import the correct paths, but it's cleaner to have both cases
104 # means we actually import the installed versions, not the files we just
78 # use the same import logic.
105 # specified. When running as test-check-py3-compat.t, we technically
79 sys.path.insert(0, os.getcwd())
106 # would import the correct paths, but it's cleaner to have both cases
107 # use the same import logic.
108 sys.path.insert(0, os.getcwd())
109 fn = check_compat_py3
110
80
111 for f in sys.argv[1:]:
81 for f in sys.argv[1:]:
112 with warnings.catch_warnings(record=True) as warns:
82 with warnings.catch_warnings(record=True) as warns:
113 fn(f)
83 check_compat_py3(f)
114
84
115 for w in warns:
85 for w in warns:
116 print(
86 print(
@@ -7,7 +7,6 b''
7 # $ ./hg serve --cmds pipe | ./contrib/debugcmdserver.py -
7 # $ ./hg serve --cmds pipe | ./contrib/debugcmdserver.py -
8 # o, 52 -> 'capabilities: getencoding runcommand\nencoding: UTF-8'
8 # o, 52 -> 'capabilities: getencoding runcommand\nencoding: UTF-8'
9
9
10 from __future__ import absolute_import, print_function
11 import struct
10 import struct
12 import sys
11 import sys
13
12
@@ -1,7 +1,6 b''
1 # debugshell extension
1 # debugshell extension
2 """a python shell with repo, changelog & manifest objects"""
2 """a python shell with repo, changelog & manifest objects"""
3
3
4 from __future__ import absolute_import
5 import code
4 import code
6 import mercurial
5 import mercurial
7 import sys
6 import sys
@@ -2,7 +2,6 b''
2 # Dump revlogs as raw data stream
2 # Dump revlogs as raw data stream
3 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
3 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
4
4
5 from __future__ import absolute_import, print_function
6
5
7 import sys
6 import sys
8 from mercurial.node import hex
7 from mercurial.node import hex
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, print_function
2
3 import argparse
1 import argparse
4 import zipfile
2 import zipfile
5
3
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, print_function
2
3 import argparse
1 import argparse
4 import os
2 import os
5 import zipfile
3 import zipfile
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, print_function
2
3 import argparse
1 import argparse
4 import zipfile
2 import zipfile
5
3
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, print_function
2
3 import argparse
1 import argparse
4 import zipfile
2 import zipfile
5
3
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, print_function
2
3 import argparse
1 import argparse
4 import os
2 import os
5 import struct
3 import struct
@@ -22,7 +20,7 b' args = ap.parse_args()'
22
20
23 if sys.version_info[0] < 3:
21 if sys.version_info[0] < 3:
24
22
25 class py2reprhack(object):
23 class py2reprhack:
26 def __repr__(self):
24 def __repr__(self):
27 """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__"""
25 """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__"""
28 return self.__bytes__()
26 return self.__bytes__()
@@ -30,7 +28,7 b' if sys.version_info[0] < 3:'
30
28
31 else:
29 else:
32
30
33 class py2reprhack(object):
31 class py2reprhack:
34 """Not needed on py3."""
32 """Not needed on py3."""
35
33
36
34
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import argparse
1 import argparse
4 import os
2 import os
5 import zipfile
3 import zipfile
@@ -1,5 +1,4 b''
1 #!/usr/bin/env python2
1 #!/usr/bin/env python2
2 from __future__ import absolute_import, print_function
3
2
4 import argparse
3 import argparse
5 import os
4 import os
@@ -1,6 +1,5 b''
1 stages:
1 stages:
2 - tests
2 - tests
3 - phabricator
4
3
5 image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG
4 image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG
6
5
@@ -30,31 +29,22 b' variables:'
30 - echo "$RUNTEST_ARGS"
29 - echo "$RUNTEST_ARGS"
31 - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
30 - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
32
31
33 checks-py2:
32 checks:
34 <<: *runtests
35 variables:
36 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
37
38 checks-py3:
39 <<: *runtests
33 <<: *runtests
40 variables:
34 variables:
41 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
35 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
42 PYTHON: python3
36 PYTHON: python3
43
37
44 rust-cargo-test-py3:
38 rust-cargo-test:
39 <<: *all
45 stage: tests
40 stage: tests
46 script:
41 script:
47 - echo "python used, $PYTHON"
42 - echo "python used, $PYTHON"
48 - make rust-tests
43 - make rust-tests
44 variables:
45 PYTHON: python3
49
46
50 test-py2:
47 test-c:
51 <<: *runtests
52 variables:
53 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
54 TEST_HGMODULEPOLICY: "c"
55 TEST_HGTESTS_ALLOW_NETIO: "1"
56
57 test-py3:
58 <<: *runtests
48 <<: *runtests
59 variables:
49 variables:
60 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
50 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
@@ -62,20 +52,14 b' test-py3:'
62 TEST_HGMODULEPOLICY: "c"
52 TEST_HGMODULEPOLICY: "c"
63 TEST_HGTESTS_ALLOW_NETIO: "1"
53 TEST_HGTESTS_ALLOW_NETIO: "1"
64
54
65 test-py2-pure:
55 test-pure:
66 <<: *runtests
67 variables:
68 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
69 TEST_HGMODULEPOLICY: "py"
70
71 test-py3-pure:
72 <<: *runtests
56 <<: *runtests
73 variables:
57 variables:
74 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
58 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
75 PYTHON: python3
59 PYTHON: python3
76 TEST_HGMODULEPOLICY: "py"
60 TEST_HGMODULEPOLICY: "py"
77
61
78 test-py3-rust:
62 test-rust:
79 <<: *runtests
63 <<: *runtests
80 variables:
64 variables:
81 HGWITHRUSTEXT: cpython
65 HGWITHRUSTEXT: cpython
@@ -83,7 +67,7 b' test-py3-rust:'
83 PYTHON: python3
67 PYTHON: python3
84 TEST_HGMODULEPOLICY: "rust+c"
68 TEST_HGMODULEPOLICY: "rust+c"
85
69
86 test-py3-rhg:
70 test-rhg:
87 <<: *runtests
71 <<: *runtests
88 variables:
72 variables:
89 HGWITHRUSTEXT: cpython
73 HGWITHRUSTEXT: cpython
@@ -91,20 +75,14 b' test-py3-rhg:'
91 PYTHON: python3
75 PYTHON: python3
92 TEST_HGMODULEPOLICY: "rust+c"
76 TEST_HGMODULEPOLICY: "rust+c"
93
77
94 test-py2-chg:
78 test-chg:
95 <<: *runtests
96 variables:
97 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
98 TEST_HGMODULEPOLICY: "c"
99
100 test-py3-chg:
101 <<: *runtests
79 <<: *runtests
102 variables:
80 variables:
103 PYTHON: python3
81 PYTHON: python3
104 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
82 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
105 TEST_HGMODULEPOLICY: "c"
83 TEST_HGMODULEPOLICY: "c"
106
84
107 check-pytype-py3:
85 check-pytype:
108 extends: .runtests_template
86 extends: .runtests_template
109 before_script:
87 before_script:
110 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
88 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
@@ -142,7 +120,7 b' check-pytype-py3:'
142
120
143 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS'
121 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS'
144
122
145 windows-py3:
123 windows:
146 <<: *windows_runtests
124 <<: *windows_runtests
147 tags:
125 tags:
148 - windows
126 - windows
@@ -151,7 +129,7 b' windows-py3:'
151 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt"
129 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt"
152 PYTHON: py -3
130 PYTHON: py -3
153
131
154 windows-py3-pyox:
132 windows-pyox:
155 <<: *windows_runtests
133 <<: *windows_runtests
156 tags:
134 tags:
157 - windows
135 - windows
@@ -28,7 +28,6 b' command="cd repos && hg-ssh user/thomas/'
28 You can also add a --read-only flag to allow read-only access to a key, e.g.:
28 You can also add a --read-only flag to allow read-only access to a key, e.g.:
29 command="hg-ssh --read-only repos/*"
29 command="hg-ssh --read-only repos/*"
30 """
30 """
31 from __future__ import absolute_import
32
31
33 import os
32 import os
34 import re
33 import re
@@ -1,6 +1,5 b''
1 # A minimal client for Mercurial's command server
1 # A minimal client for Mercurial's command server
2
2
3 from __future__ import absolute_import, print_function
4
3
5 import io
4 import io
6 import os
5 import os
@@ -50,7 +49,7 b' def connectpipe(path=None, extraargs=())'
50 return server
49 return server
51
50
52
51
53 class unixconnection(object):
52 class unixconnection:
54 def __init__(self, sockpath):
53 def __init__(self, sockpath):
55 self.sock = sock = socket.socket(socket.AF_UNIX)
54 self.sock = sock = socket.socket(socket.AF_UNIX)
56 sock.connect(sockpath)
55 sock.connect(sockpath)
@@ -63,7 +62,7 b' class unixconnection(object):'
63 self.sock.close()
62 self.sock.close()
64
63
65
64
66 class unixserver(object):
65 class unixserver:
67 def __init__(self, sockpath, logpath=None, repopath=None):
66 def __init__(self, sockpath, logpath=None, repopath=None):
68 self.sockpath = sockpath
67 self.sockpath = sockpath
69 cmdline = [b'hg', b'serve', b'--cmdserver', b'unix', b'-a', sockpath]
68 cmdline = [b'hg', b'serve', b'--cmdserver', b'unix', b'-a', sockpath]
@@ -1,6 +1,5 b''
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2
2
3 from __future__ import absolute_import, print_function
4
3
5 import ast
4 import ast
6 import collections
5 import collections
@@ -20,10 +19,11 b' if True: # disable lexical sorting chec'
20
19
21 import testparseutil
20 import testparseutil
22
21
23 # Whitelist of modules that symbols can be directly imported from.
22 # Allow list of modules that symbols can be directly imported from.
24 allowsymbolimports = (
23 allowsymbolimports = (
25 '__future__',
24 '__future__',
26 'breezy',
25 'breezy',
26 'concurrent',
27 'hgclient',
27 'hgclient',
28 'mercurial',
28 'mercurial',
29 'mercurial.hgweb.common',
29 'mercurial.hgweb.common',
@@ -46,9 +46,10 b' allowsymbolimports = ('
46 'mercurial.thirdparty.attr',
46 'mercurial.thirdparty.attr',
47 'mercurial.thirdparty.zope',
47 'mercurial.thirdparty.zope',
48 'mercurial.thirdparty.zope.interface',
48 'mercurial.thirdparty.zope.interface',
49 'typing',
49 )
50 )
50
51
51 # Whitelist of symbols that can be directly imported.
52 # Allow list of symbols that can be directly imported.
52 directsymbols = ('demandimport',)
53 directsymbols = ('demandimport',)
53
54
54 # Modules that must be aliased because they are commonly confused with
55 # Modules that must be aliased because they are commonly confused with
@@ -58,21 +59,6 b' requirealias = {'
58 }
59 }
59
60
60
61
61 def usingabsolute(root):
62 """Whether absolute imports are being used."""
63 if sys.version_info[0] >= 3:
64 return True
65
66 for node in ast.walk(root):
67 if isinstance(node, ast.ImportFrom):
68 if node.module == '__future__':
69 for n in node.names:
70 if n.name == 'absolute_import':
71 return True
72
73 return False
74
75
76 def walklocal(root):
62 def walklocal(root):
77 """Recursively yield all descendant nodes but not in a different scope"""
63 """Recursively yield all descendant nodes but not in a different scope"""
78 todo = collections.deque(ast.iter_child_nodes(root))
64 todo = collections.deque(ast.iter_child_nodes(root))
@@ -402,21 +388,10 b' def imported_modules(source, modulename,'
402
388
403
389
404 def verify_import_convention(module, source, localmods):
390 def verify_import_convention(module, source, localmods):
405 """Verify imports match our established coding convention.
391 """Verify imports match our established coding convention."""
406
392 root = ast.parse(source)
407 We have 2 conventions: legacy and modern. The modern convention is in
408 effect when using absolute imports.
409
393
410 The legacy convention only looks for mixed imports. The modern convention
394 return verify_modern_convention(module, root, localmods)
411 is much more thorough.
412 """
413 root = ast.parse(source)
414 absolute = usingabsolute(root)
415
416 if absolute:
417 return verify_modern_convention(module, root, localmods)
418 else:
419 return verify_stdlib_on_own_line(root)
420
395
421
396
422 def verify_modern_convention(module, root, localmods, root_col_offset=0):
397 def verify_modern_convention(module, root, localmods, root_col_offset=0):
@@ -617,33 +592,6 b' def verify_modern_convention(module, roo'
617 )
592 )
618
593
619
594
620 def verify_stdlib_on_own_line(root):
621 """Given some python source, verify that stdlib imports are done
622 in separate statements from relative local module imports.
623
624 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo')))
625 [('mixed imports\\n stdlib: sys\\n relative: foo', 1)]
626 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os')))
627 []
628 >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar')))
629 []
630 """
631 for node in ast.walk(root):
632 if isinstance(node, ast.Import):
633 from_stdlib = {False: [], True: []}
634 for n in node.names:
635 from_stdlib[n.name in stdlib_modules].append(n.name)
636 if from_stdlib[True] and from_stdlib[False]:
637 yield (
638 'mixed imports\n stdlib: %s\n relative: %s'
639 % (
640 ', '.join(sorted(from_stdlib[True])),
641 ', '.join(sorted(from_stdlib[False])),
642 ),
643 node.lineno,
644 )
645
646
647 class CircularImport(Exception):
595 class CircularImport(Exception):
648 pass
596 pass
649
597
@@ -679,7 +627,6 b' def find_cycles(imports):'
679
627
680 All module names recorded in `imports` should be absolute ones.
628 All module names recorded in `imports` should be absolute ones.
681
629
682 >>> from __future__ import print_function
683 >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
630 >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
684 ... 'top.bar': ['top.baz', 'sys'],
631 ... 'top.bar': ['top.baz', 'sys'],
685 ... 'top.baz': ['top.foo'],
632 ... 'top.baz': ['top.foo'],
@@ -29,19 +29,19 b''
29 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10-amd64.exe"
29 $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10-amd64.exe"
30 $PYTHON38_x64_SHA256 = "7628244cb53408b50639d2c1287c659f4e29d3dfdb9084b11aed5870c0c6a48a"
30 $PYTHON38_x64_SHA256 = "7628244cb53408b50639d2c1287c659f4e29d3dfdb9084b11aed5870c0c6a48a"
31
31
32 $PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9.exe"
32 $PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.12/python-3.9.12.exe"
33 $PYTHON39_x86_SHA256 = "6646a5683adf14d35e8c53aab946895bc0f0b825f7acac3a62cc85ee7d0dc71a"
33 $PYTHON39_x86_SHA256 = "3d883326f30ac231c06b33f2a8ea700a185c20bf98d01da118079e9134d5fd20"
34 $PYTHON39_X64_URL = "https://www.python.org/ftp/python/3.9.9/python-3.9.9-amd64.exe"
34 $PYTHON39_X64_URL = "https://www.python.org/ftp/python/3.9.12/python-3.9.12-amd64.exe"
35 $PYTHON39_x64_SHA256 = "137d59e5c0b01a8f1bdcba08344402ae658c81c6bf03b6602bd8b4e951ad0714"
35 $PYTHON39_x64_SHA256 = "2ba57ab2281094f78fc0227a27f4d47c90d94094e7cca35ce78419e616b3cb63"
36
36
37 $PYTHON310_x86_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0.exe"
37 $PYTHON310_x86_URL = "https://www.python.org/ftp/python/3.10.4/python-3.10.4.exe"
38 $PYTHON310_x86_SHA256 = "ea896eeefb1db9e12fb89ec77a6e28c9fe52b4a162a34c85d9688be2ec2392e8"
38 $PYTHON310_x86_SHA256 = "97c37c53c7a826f5b00e185754ab2a324a919f7afc469b20764b71715c80041d"
39 $PYTHON310_X64_URL = "https://www.python.org/ftp/python/3.10.0/python-3.10.0-amd64.exe"
39 $PYTHON310_X64_URL = "https://www.python.org/ftp/python/3.10.4/python-3.10.4-amd64.exe"
40 $PYTHON310_x64_SHA256 = "cb580eb7dc55f9198e650f016645023e8b2224cf7d033857d12880b46c5c94ef"
40 $PYTHON310_x64_SHA256 = "a81fc4180f34e5733c3f15526c668ff55de096366f9006d8a44c0336704e50f1"
41
41
42 # PIP 19.2.3.
42 # PIP 22.0.4.
43 $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
43 $PIP_URL = "https://github.com/pypa/get-pip/raw/38e54e5de07c66e875c11a1ebbdb938854625dd8/public/get-pip.py"
44 $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe"
44 $PIP_SHA256 = "e235c437e5c7d7524fbce3880ca39b917a73dc565e0c813465b7a7a329bb279a"
45
45
46 $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
46 $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe"
47 $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
47 $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538"
@@ -90,7 +90,13 b' function Invoke-Process($path, $argument'
90 $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden
90 $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden
91
91
92 if ($p.ExitCode -ne 0) {
92 if ($p.ExitCode -ne 0) {
93 throw "process exited non-0: $($p.ExitCode)"
93 # If the MSI is already installed, ignore the error
94 if ($p.ExitCode -eq 1638) {
95 Write-Output "program already installed; continuing..."
96 }
97 else {
98 throw "process exited non-0: $($p.ExitCode)"
99 }
94 }
100 }
95 }
101 }
96
102
@@ -150,7 +156,7 b' function Install-Dependencies($prefix) {'
150 Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip}
156 Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip}
151 Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip}
157 Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip}
152 Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip}
158 Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip}
153 # Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
159 Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip}
154 Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip}
160 Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip}
155 Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip}
161 Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip}
156 Install-Python3 "Python 3.10 32-bit" ${prefix}\assets\python310-x86.exe ${prefix}\python310-x86 ${pip}
162 Install-Python3 "Python 3.10 32-bit" ${prefix}\assets\python310-x86.exe ${prefix}\python310-x86 ${pip}
@@ -11,8 +11,6 b' Reads current and peak memory usage from'
11 prints it to ``stderr`` on exit.
11 prints it to ``stderr`` on exit.
12 '''
12 '''
13
13
14 from __future__ import absolute_import
15
16
14
17 def memusage(ui):
15 def memusage(ui):
18 """Report memory usage of the current process."""
16 """Report memory usage of the current process."""
@@ -92,10 +92,8 b' override_dh_auto_install: $(DEB_HG_PYTHO'
92 mkdir -p "$(CURDIR)"/debian/mercurial/etc/mercurial/hgrc.d/
92 mkdir -p "$(CURDIR)"/debian/mercurial/etc/mercurial/hgrc.d/
93 cp contrib/packaging/debian/*.rc "$(CURDIR)"/debian/mercurial/etc/mercurial/hgrc.d/
93 cp contrib/packaging/debian/*.rc "$(CURDIR)"/debian/mercurial/etc/mercurial/hgrc.d/
94 # completions
94 # completions
95 mkdir -p "$(CURDIR)"/debian/mercurial/usr/share/bash-completion/completions
96 cp contrib/bash_completion "$(CURDIR)"/debian/mercurial/usr/share/bash-completion/completions/hg
97 mkdir -p "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions
95 mkdir -p "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions
98 cp contrib/zsh_completion "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions/_hg
96 mv "$(CURDIR)"/debian/mercurial/usr/share/zsh/site-functions/_hg "$(CURDIR)"/debian/mercurial/usr/share/zsh/vendor-completions/_hg
99 if [ "$(DEB_HG_CHG_BY_DEFAULT)" -eq 1 ]; then \
97 if [ "$(DEB_HG_CHG_BY_DEFAULT)" -eq 1 ]; then \
100 mkdir -p "$(CURDIR)"/debian/mercurial/usr/lib/mercurial; \
98 mkdir -p "$(CURDIR)"/debian/mercurial/usr/lib/mercurial; \
101 mv "$(CURDIR)"/debian/mercurial/usr/bin/hg "$(CURDIR)"/debian/mercurial/usr/lib/mercurial/hg; \
99 mv "$(CURDIR)"/debian/mercurial/usr/bin/hg "$(CURDIR)"/debian/mercurial/usr/lib/mercurial/hg; \
@@ -20,13 +20,7 b' HERE = pathlib.Path(os.path.abspath(os.p'
20 SOURCE_DIR = HERE.parent.parent.parent
20 SOURCE_DIR = HERE.parent.parent.parent
21
21
22
22
23 def build_inno(pyoxidizer_target=None, python=None, iscc=None, version=None):
23 def build_inno(pyoxidizer_target, iscc=None, version=None):
24 if not pyoxidizer_target and not python:
25 raise Exception("--python required unless building with PyOxidizer")
26
27 if python and not os.path.isabs(python):
28 raise Exception("--python arg must be an absolute path")
29
30 if iscc:
24 if iscc:
31 iscc = pathlib.Path(iscc)
25 iscc = pathlib.Path(iscc)
32 else:
26 else:
@@ -38,59 +32,30 b' def build_inno(pyoxidizer_target=None, p'
38
32
39 build_dir = SOURCE_DIR / "build"
33 build_dir = SOURCE_DIR / "build"
40
34
41 if pyoxidizer_target:
35 inno.build_with_pyoxidizer(
42 inno.build_with_pyoxidizer(
36 SOURCE_DIR, build_dir, pyoxidizer_target, iscc, version=version
43 SOURCE_DIR, build_dir, pyoxidizer_target, iscc, version=version
37 )
44 )
45 else:
46 inno.build_with_py2exe(
47 SOURCE_DIR,
48 build_dir,
49 pathlib.Path(python),
50 iscc,
51 version=version,
52 )
53
38
54
39
55 def build_wix(
40 def build_wix(
41 pyoxidizer_target,
56 name=None,
42 name=None,
57 pyoxidizer_target=None,
58 python=None,
59 version=None,
43 version=None,
60 sign_sn=None,
44 sign_sn=None,
61 sign_cert=None,
45 sign_cert=None,
62 sign_password=None,
46 sign_password=None,
63 sign_timestamp_url=None,
47 sign_timestamp_url=None,
64 extra_packages_script=None,
65 extra_wxs=None,
48 extra_wxs=None,
66 extra_features=None,
49 extra_features=None,
67 extra_pyoxidizer_vars=None,
50 extra_pyoxidizer_vars=None,
68 ):
51 ):
69 if not pyoxidizer_target and not python:
70 raise Exception("--python required unless building with PyOxidizer")
71
72 if python and not os.path.isabs(python):
73 raise Exception("--python arg must be an absolute path")
74
75 kwargs = {
52 kwargs = {
76 "source_dir": SOURCE_DIR,
53 "source_dir": SOURCE_DIR,
77 "version": version,
54 "version": version,
55 "target_triple": pyoxidizer_target,
56 "extra_pyoxidizer_vars": extra_pyoxidizer_vars,
78 }
57 }
79
58
80 if pyoxidizer_target:
81 fn = wix.build_installer_pyoxidizer
82 kwargs["target_triple"] = pyoxidizer_target
83 kwargs["extra_pyoxidizer_vars"] = extra_pyoxidizer_vars
84 else:
85 fn = wix.build_installer_py2exe
86 kwargs["python_exe"] = pathlib.Path(python)
87
88 if extra_packages_script:
89 if pyoxidizer_target:
90 raise Exception(
91 "pyoxidizer does not support --extra-packages-script"
92 )
93 kwargs["extra_packages_script"] = extra_packages_script
94 if extra_wxs:
59 if extra_wxs:
95 kwargs["extra_wxs"] = dict(
60 kwargs["extra_wxs"] = dict(
96 thing.split("=") for thing in extra_wxs.split(",")
61 thing.split("=") for thing in extra_wxs.split(",")
@@ -107,7 +72,7 b' def build_wix('
107 "timestamp_url": sign_timestamp_url,
72 "timestamp_url": sign_timestamp_url,
108 }
73 }
109
74
110 fn(**kwargs)
75 wix.build_installer_pyoxidizer(**kwargs)
111
76
112
77
113 def get_parser():
78 def get_parser():
@@ -119,14 +84,14 b' def get_parser():'
119 sp.add_argument(
84 sp.add_argument(
120 "--pyoxidizer-target",
85 "--pyoxidizer-target",
121 choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"},
86 choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"},
87 required=True,
122 help="Build with PyOxidizer targeting this host triple",
88 help="Build with PyOxidizer targeting this host triple",
123 )
89 )
124 sp.add_argument("--python", help="path to python.exe to use")
125 sp.add_argument("--iscc", help="path to iscc.exe to use")
90 sp.add_argument("--iscc", help="path to iscc.exe to use")
126 sp.add_argument(
91 sp.add_argument(
127 "--version",
92 "--version",
128 help="Mercurial version string to use "
93 help="Mercurial version string to use "
129 "(detected from __version__.py if not defined",
94 "(detected from __version__.py if not defined)",
130 )
95 )
131 sp.set_defaults(func=build_inno)
96 sp.set_defaults(func=build_inno)
132
97
@@ -137,9 +102,9 b' def get_parser():'
137 sp.add_argument(
102 sp.add_argument(
138 "--pyoxidizer-target",
103 "--pyoxidizer-target",
139 choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"},
104 choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"},
105 required=True,
140 help="Build with PyOxidizer targeting this host triple",
106 help="Build with PyOxidizer targeting this host triple",
141 )
107 )
142 sp.add_argument("--python", help="Path to Python executable to use")
143 sp.add_argument(
108 sp.add_argument(
144 "--sign-sn",
109 "--sign-sn",
145 help="Subject name (or fragment thereof) of certificate "
110 help="Subject name (or fragment thereof) of certificate "
@@ -155,12 +120,6 b' def get_parser():'
155 )
120 )
156 sp.add_argument("--version", help="Version string to use")
121 sp.add_argument("--version", help="Version string to use")
157 sp.add_argument(
122 sp.add_argument(
158 "--extra-packages-script",
159 help=(
160 "Script to execute to include extra packages in " "py2exe binary."
161 ),
162 )
163 sp.add_argument(
164 "--extra-wxs", help="CSV of path_to_wxs_file=working_dir_for_wxs_file"
123 "--extra-wxs", help="CSV of path_to_wxs_file=working_dir_for_wxs_file"
165 )
124 )
166 sp.add_argument(
125 sp.add_argument(
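With the py2exe code path removed, both the inno and wix subcommands now require --pyoxidizer-target, and build_inno()/build_wix() always delegate to the PyOxidizer-based builders. A minimal sketch of driving the refactored helper directly — the import path is assumed from the contrib/packaging layout and is not shown in this diff:

    # Editorial sketch; assumes this module is importable as hgpackaging.cli.
    from hgpackaging.cli import build_inno

    build_inno(
        pyoxidizer_target='x86_64-pc-windows-msvc',  # one of the two accepted host triples
        version='6.2',  # optional; per the help text, detected from __version__.py when omitted
    )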
@@ -10,6 +10,7 b''
10 import gzip
10 import gzip
11 import hashlib
11 import hashlib
12 import pathlib
12 import pathlib
13 import typing
13 import urllib.request
14 import urllib.request
14
15
15
16
@@ -25,48 +26,6 b' DOWNLOADS = {'
25 'size': 715086,
26 'size': 715086,
26 'sha256': '411f94974492fd2ecf52590cb05b1023530aec67e64154a88b1e4ebcd9c28588',
27 'sha256': '411f94974492fd2ecf52590cb05b1023530aec67e64154a88b1e4ebcd9c28588',
27 },
28 },
28 'py2exe': {
29 'url': 'https://versaweb.dl.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.zip',
30 'size': 149687,
31 'sha256': '6bd383312e7d33eef2e43a5f236f9445e4f3e0f6b16333c6f183ed445c44ddbd',
32 'version': '0.6.9',
33 },
34 # The VC9 CRT merge modules aren't readily available on most systems because
35 # they are only installed as part of a full Visual Studio 2008 install.
36 # While we could potentially extract them from a Visual Studio 2008
37 # installer, it is easier to just fetch them from a known URL.
38 'vc9-crt-x86-msm': {
39 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/Microsoft_VC90_CRT_x86.msm',
40 'size': 615424,
41 'sha256': '837e887ef31b332feb58156f429389de345cb94504228bb9a523c25a9dd3d75e',
42 },
43 'vc9-crt-x86-msm-policy': {
44 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/policy_9_0_Microsoft_VC90_CRT_x86.msm',
45 'size': 71168,
46 'sha256': '3fbcf92e3801a0757f36c5e8d304e134a68d5cafd197a6df7734ae3e8825c940',
47 },
48 'vc9-crt-x64-msm': {
49 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/Microsoft_VC90_CRT_x86_x64.msm',
50 'size': 662528,
51 'sha256': '50d9639b5ad4844a2285269c7551bf5157ec636e32396ddcc6f7ec5bce487a7c',
52 },
53 'vc9-crt-x64-msm-policy': {
54 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/policy_9_0_Microsoft_VC90_CRT_x86_x64.msm',
55 'size': 71168,
56 'sha256': '0550ea1929b21239134ad3a678c944ba0f05f11087117b6cf0833e7110686486',
57 },
58 'virtualenv': {
59 'url': 'https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/virtualenv-16.4.3.tar.gz',
60 'size': 3713208,
61 'sha256': '984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39',
62 'version': '16.4.3',
63 },
64 'wix': {
65 'url': 'https://github.com/wixtoolset/wix3/releases/download/wix3111rtm/wix311-binaries.zip',
66 'size': 34358269,
67 'sha256': '37f0a533b0978a454efb5dc3bd3598becf9660aaf4287e55bf68ca6b527d051d',
68 'version': '3.11.1',
69 },
70 }
29 }
71
30
72
31
@@ -168,8 +127,8 b' def download_to_path(url: str, path: pat'
168
127
169
128
170 def download_entry(
129 def download_entry(
171 name: dict, dest_path: pathlib.Path, local_name=None
130 name: str, dest_path: pathlib.Path, local_name=None
172 ) -> pathlib.Path:
131 ) -> typing.Tuple[pathlib.Path, typing.Dict[str, typing.Union[str, int]]]:
173 entry = DOWNLOADS[name]
132 entry = DOWNLOADS[name]
174
133
175 url = entry['url']
134 url = entry['url']
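The corrected annotation matches what download_entry() already returned: callers removed elsewhere in this changeset index the result with [0], so the function yields both the local file path and the DOWNLOADS metadata dict. A short usage sketch, using a hypothetical entry name purely for illustration:

    # Editorial sketch; 'pyoxidizer' is a hypothetical DOWNLOADS key, not taken from this diff.
    import pathlib

    path, entry = download_entry('pyoxidizer', pathlib.Path('build/downloads'))
    print(path, entry['size'], entry['sha256'])  # metadata comes straight from DOWNLOADS[name]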
@@ -14,29 +14,13 b' import subprocess'
14
14
15 import jinja2
15 import jinja2
16
16
17 from .py2exe import (
18 build_py2exe,
19 stage_install,
20 )
21 from .pyoxidizer import create_pyoxidizer_install_layout
17 from .pyoxidizer import create_pyoxidizer_install_layout
22 from .util import (
18 from .util import (
23 find_legacy_vc_runtime_files,
24 normalize_windows_version,
19 normalize_windows_version,
25 process_install_rules,
20 process_install_rules,
26 read_version_py,
21 read_version_py,
27 )
22 )
28
23
29 EXTRA_PACKAGES = {
30 'dulwich',
31 'keyring',
32 'pygments',
33 'win32ctypes',
34 }
35
36 EXTRA_INCLUDES = {
37 '_curses',
38 '_curses_panel',
39 }
40
24
41 EXTRA_INSTALL_RULES = [
25 EXTRA_INSTALL_RULES = [
42 ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'),
26 ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'),
@@ -47,80 +31,6 b' PACKAGE_FILES_METADATA = {'
47 }
31 }
48
32
49
33
50 def build_with_py2exe(
51 source_dir: pathlib.Path,
52 build_dir: pathlib.Path,
53 python_exe: pathlib.Path,
54 iscc_exe: pathlib.Path,
55 version=None,
56 ):
57 """Build the Inno installer using py2exe.
58
59 Build files will be placed in ``build_dir``.
60
61 py2exe's setup.py doesn't use setuptools. It doesn't have modern logic
62 for finding the Python 2.7 toolchain. So, we require the environment
63 to already be configured with an active toolchain.
64 """
65 if not iscc_exe.exists():
66 raise Exception('%s does not exist' % iscc_exe)
67
68 vc_x64 = r'\x64' in os.environ.get('LIB', '')
69 arch = 'x64' if vc_x64 else 'x86'
70 inno_build_dir = build_dir / ('inno-py2exe-%s' % arch)
71 staging_dir = inno_build_dir / 'stage'
72
73 requirements_txt = (
74 source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt'
75 )
76
77 inno_build_dir.mkdir(parents=True, exist_ok=True)
78
79 build_py2exe(
80 source_dir,
81 build_dir,
82 python_exe,
83 'inno',
84 requirements_txt,
85 extra_packages=EXTRA_PACKAGES,
86 extra_includes=EXTRA_INCLUDES,
87 )
88
89 # Purge the staging directory for every build so packaging is
90 # pristine.
91 if staging_dir.exists():
92 print('purging %s' % staging_dir)
93 shutil.rmtree(staging_dir)
94
95 # Now assemble all the packaged files into the staging directory.
96 stage_install(source_dir, staging_dir)
97
98 # We also install some extra files.
99 process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
100
101 # hg.exe depends on VC9 runtime DLLs. Copy those into place.
102 for f in find_legacy_vc_runtime_files(vc_x64):
103 if f.name.endswith('.manifest'):
104 basename = 'Microsoft.VC90.CRT.manifest'
105 else:
106 basename = f.name
107
108 dest_path = staging_dir / basename
109
110 print('copying %s to %s' % (f, dest_path))
111 shutil.copyfile(f, dest_path)
112
113 build_installer(
114 source_dir,
115 inno_build_dir,
116 staging_dir,
117 iscc_exe,
118 version,
119 arch="x64" if vc_x64 else None,
120 suffix="-python2",
121 )
122
123
124 def build_with_pyoxidizer(
34 def build_with_pyoxidizer(
125 source_dir: pathlib.Path,
35 source_dir: pathlib.Path,
126 build_dir: pathlib.Path,
36 build_dir: pathlib.Path,
@@ -23,7 +23,6 b' from .util import ('
23
23
24
24
25 STAGING_RULES_WINDOWS = [
25 STAGING_RULES_WINDOWS = [
26 ('contrib/bash_completion', 'contrib/'),
27 ('contrib/hgk', 'contrib/hgk.tcl'),
26 ('contrib/hgk', 'contrib/hgk.tcl'),
28 ('contrib/hgweb.fcgi', 'contrib/'),
27 ('contrib/hgweb.fcgi', 'contrib/'),
29 ('contrib/hgweb.wsgi', 'contrib/'),
28 ('contrib/hgweb.wsgi', 'contrib/'),
@@ -36,7 +35,6 b' STAGING_RULES_WINDOWS = ['
36 ('contrib/win32/postinstall.txt', 'ReleaseNotes.txt'),
35 ('contrib/win32/postinstall.txt', 'ReleaseNotes.txt'),
37 ('contrib/win32/ReadMe.html', 'ReadMe.html'),
36 ('contrib/win32/ReadMe.html', 'ReadMe.html'),
38 ('contrib/xml.rnc', 'contrib/'),
37 ('contrib/xml.rnc', 'contrib/'),
39 ('contrib/zsh_completion', 'contrib/'),
40 ('doc/*.html', 'doc/'),
38 ('doc/*.html', 'doc/'),
41 ('doc/style.css', 'doc/'),
39 ('doc/style.css', 'doc/'),
42 ('COPYING', 'Copying.txt'),
40 ('COPYING', 'Copying.txt'),
@@ -7,23 +7,15 b''
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import distutils.version
11 import getpass
12 import glob
10 import glob
13 import os
11 import os
14 import pathlib
12 import pathlib
15 import re
13 import re
16 import shutil
14 import shutil
17 import subprocess
15 import subprocess
18 import tarfile
19 import zipfile
16 import zipfile
20
17
21
18
22 def extract_tar_to_directory(source: pathlib.Path, dest: pathlib.Path):
23 with tarfile.open(source, 'r') as tf:
24 tf.extractall(dest)
25
26
27 def extract_zip_to_directory(source: pathlib.Path, dest: pathlib.Path):
19 def extract_zip_to_directory(source: pathlib.Path, dest: pathlib.Path):
28 with zipfile.ZipFile(source, 'r') as zf:
20 with zipfile.ZipFile(source, 'r') as zf:
29 zf.extractall(dest)
21 zf.extractall(dest)
@@ -81,59 +73,6 b' def find_vc_runtime_dll(x64=False):'
81 raise Exception("could not find vcruntime140.dll")
73 raise Exception("could not find vcruntime140.dll")
82
74
83
75
84 def find_legacy_vc_runtime_files(x64=False):
85 """Finds Visual C++ Runtime DLLs to include in distribution."""
86 winsxs = pathlib.Path(os.environ['SYSTEMROOT']) / 'WinSxS'
87
88 prefix = 'amd64' if x64 else 'x86'
89
90 candidates = sorted(
91 p
92 for p in os.listdir(winsxs)
93 if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix)
94 )
95
96 for p in candidates:
97 print('found candidate VC runtime: %s' % p)
98
99 # Take the newest version.
100 version = candidates[-1]
101
102 d = winsxs / version
103
104 return [
105 d / 'msvcm90.dll',
106 d / 'msvcp90.dll',
107 d / 'msvcr90.dll',
108 winsxs / 'Manifests' / ('%s.manifest' % version),
109 ]
110
111
112 def windows_10_sdk_info():
113 """Resolves information about the Windows 10 SDK."""
114
115 base = pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Windows Kits' / '10'
116
117 if not base.is_dir():
118 raise Exception('unable to find Windows 10 SDK at %s' % base)
119
120 # Find the latest version.
121 bin_base = base / 'bin'
122
123 versions = [v for v in os.listdir(bin_base) if v.startswith('10.')]
124 version = sorted(versions, reverse=True)[0]
125
126 bin_version = bin_base / version
127
128 return {
129 'root': base,
130 'version': version,
131 'bin_root': bin_version,
132 'bin_x86': bin_version / 'x86',
133 'bin_x64': bin_version / 'x64',
134 }
135
136
137 def normalize_windows_version(version):
76 def normalize_windows_version(version):
138 """Normalize Mercurial version string so WiX/Inno accepts it.
77 """Normalize Mercurial version string so WiX/Inno accepts it.
139
78
@@ -194,93 +133,6 b' def normalize_windows_version(version):'
194 return '.'.join('%d' % x for x in versions[0:4])
133 return '.'.join('%d' % x for x in versions[0:4])
195
134
196
135
197 def find_signtool():
198 """Find signtool.exe from the Windows SDK."""
199 sdk = windows_10_sdk_info()
200
201 for key in ('bin_x64', 'bin_x86'):
202 p = sdk[key] / 'signtool.exe'
203
204 if p.exists():
205 return p
206
207 raise Exception('could not find signtool.exe in Windows 10 SDK')
208
209
210 def sign_with_signtool(
211 file_path,
212 description,
213 subject_name=None,
214 cert_path=None,
215 cert_password=None,
216 timestamp_url=None,
217 ):
218 """Digitally sign a file with signtool.exe.
219
220 ``file_path`` is file to sign.
221 ``description`` is text that goes in the signature.
222
223 The signing certificate can be specified by ``cert_path`` or
224 ``subject_name``. These correspond to the ``/f`` and ``/n`` arguments
225 to signtool.exe, respectively.
226
227 The certificate password can be specified via ``cert_password``. If
228 not provided, you will be prompted for the password.
229
230 ``timestamp_url`` is the URL of a RFC 3161 timestamp server (``/tr``
231 argument to signtool.exe).
232 """
233 if cert_path and subject_name:
234 raise ValueError('cannot specify both cert_path and subject_name')
235
236 while cert_path and not cert_password:
237 cert_password = getpass.getpass('password for %s: ' % cert_path)
238
239 args = [
240 str(find_signtool()),
241 'sign',
242 '/v',
243 '/fd',
244 'sha256',
245 '/d',
246 description,
247 ]
248
249 if cert_path:
250 args.extend(['/f', str(cert_path), '/p', cert_password])
251 elif subject_name:
252 args.extend(['/n', subject_name])
253
254 if timestamp_url:
255 args.extend(['/tr', timestamp_url, '/td', 'sha256'])
256
257 args.append(str(file_path))
258
259 print('signing %s' % file_path)
260 subprocess.run(args, check=True)
261
262
263 PRINT_PYTHON_INFO = '''
264 import platform; print("%s:%s" % (platform.architecture()[0], platform.python_version()))
265 '''.strip()
266
267
268 def python_exe_info(python_exe: pathlib.Path):
269 """Obtain information about a Python executable."""
270
271 res = subprocess.check_output([str(python_exe), '-c', PRINT_PYTHON_INFO])
272
273 arch, version = res.decode('utf-8').split(':')
274
275 version = distutils.version.LooseVersion(version)
276
277 return {
278 'arch': arch,
279 'version': version,
280 'py3': version >= distutils.version.LooseVersion('3'),
281 }
282
283
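A rough sketch of consuming the returned mapping (the interpreter path is a placeholder):

    import pathlib

    info = python_exe_info(pathlib.Path(r'c:\python27\python.exe'))
    # 'arch' comes from platform.architecture() ('32bit' or '64bit'),
    # 'version' is a distutils LooseVersion, 'py3' is a convenience flag.
    if not info['py3']:
        print('building with Python 2 (%s, %s)' % (info['arch'], info['version']))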
284 def process_install_rules(
136 def process_install_rules(
285 rules: list, source_dir: pathlib.Path, dest_dir: pathlib.Path
137 rules: list, source_dir: pathlib.Path, dest_dir: pathlib.Path
286 ):
138 ):
@@ -7,376 +7,16 b''
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import collections
11 import json
10 import json
12 import os
11 import os
13 import pathlib
12 import pathlib
14 import re
15 import shutil
13 import shutil
16 import subprocess
17 import typing
14 import typing
18 import uuid
19 import xml.dom.minidom
20
15
21 from .downloads import download_entry
22 from .py2exe import (
23 build_py2exe,
24 stage_install,
25 )
26 from .pyoxidizer import (
16 from .pyoxidizer import (
27 build_docs_html,
17 build_docs_html,
28 create_pyoxidizer_install_layout,
29 run_pyoxidizer,
18 run_pyoxidizer,
30 )
19 )
31 from .util import (
32 extract_zip_to_directory,
33 normalize_windows_version,
34 process_install_rules,
35 sign_with_signtool,
36 )
37
38
39 EXTRA_PACKAGES = {
40 'dulwich',
41 'distutils',
42 'keyring',
43 'pygments',
44 'win32ctypes',
45 }
46
47 EXTRA_INCLUDES = {
48 '_curses',
49 '_curses_panel',
50 }
51
52 EXTRA_INSTALL_RULES = [
53 ('contrib/packaging/wix/COPYING.rtf', 'COPYING.rtf'),
54 ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'),
55 ]
56
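These tuples are later handed to ``process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)``. The helper itself is truncated in this diff; a minimal sketch of what it is assumed to do, for illustration only:

    import pathlib
    import shutil

    def copy_install_rules(rules, source_dir: pathlib.Path, dest_dir: pathlib.Path):
        # hypothetical stand-in for process_install_rules(): copy each source
        # path (relative to the checkout) to its destination in the staging dir
        for source, dest in rules:
            src = source_dir / source
            dst = dest_dir / dest
            dst.parent.mkdir(parents=True, exist_ok=True)
            print('copying %s to %s' % (src, dst))
            shutil.copy(src, dst)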
57 STAGING_REMOVE_FILES = [
58 # We use the RTF variant.
59 'copying.txt',
60 ]
61
62 SHORTCUTS = {
63 # hg.1.html'
64 'hg.file.5d3e441c_28d9_5542_afd0_cdd4234f12d5': {
65 'Name': 'Mercurial Command Reference',
66 },
67 # hgignore.5.html
68 'hg.file.5757d8e0_f207_5e10_a2ec_3ba0a062f431': {
69 'Name': 'Mercurial Ignore Files',
70 },
71 # hgrc.5.html
72 'hg.file.92e605fd_1d1a_5dc6_9fc0_5d2998eb8f5e': {
73 'Name': 'Mercurial Configuration Files',
74 },
75 }
76
77
78 def find_version(source_dir: pathlib.Path):
79 version_py = source_dir / 'mercurial' / '__version__.py'
80
81 with version_py.open('r', encoding='utf-8') as fh:
82 source = fh.read().strip()
83
84 m = re.search('version = b"(.*)"', source)
85 return m.group(1)
86
87
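The regular expression above expects ``mercurial/__version__.py`` to contain a single ``version = b"..."`` assignment; a quick illustration (the version string is made up):

    import re

    source = 'version = b"6.2.1"'  # illustrative __version__.py content
    m = re.search('version = b"(.*)"', source)
    assert m.group(1) == '6.2.1'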
88 def ensure_vc90_merge_modules(build_dir):
89 x86 = (
90 download_entry(
91 'vc9-crt-x86-msm',
92 build_dir,
93 local_name='microsoft.vcxx.crt.x86_msm.msm',
94 )[0],
95 download_entry(
96 'vc9-crt-x86-msm-policy',
97 build_dir,
98 local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm',
99 )[0],
100 )
101
102 x64 = (
103 download_entry(
104 'vc9-crt-x64-msm',
105 build_dir,
106 local_name='microsoft.vcxx.crt.x64_msm.msm',
107 )[0],
108 download_entry(
109 'vc9-crt-x64-msm-policy',
110 build_dir,
111 local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm',
112 )[0],
113 )
114 return {
115 'x86': x86,
116 'x64': x64,
117 }
118
119
120 def run_candle(wix, cwd, wxs, source_dir, defines=None):
121 args = [
122 str(wix / 'candle.exe'),
123 '-nologo',
124 str(wxs),
125 '-dSourceDir=%s' % source_dir,
126 ]
127
128 if defines:
129 args.extend('-d%s=%s' % define for define in sorted(defines.items()))
130
131 subprocess.run(args, cwd=str(cwd), check=True)
132
133
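Each ``defines`` entry expands into a single ``-dNAME=VALUE`` argument for candle.exe; for example (the values are illustrative):

    defines = {'Platform': 'x64', 'Version': '6.2'}
    extra_args = ['-d%s=%s' % define for define in sorted(defines.items())]
    assert extra_args == ['-dPlatform=x64', '-dVersion=6.2']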
134 def make_files_xml(staging_dir: pathlib.Path, is_x64) -> str:
135 """Create XML string listing every file to be installed."""
136
137 # We derive GUIDs from a deterministic file path identifier.
138 # We shoehorn the name into something that looks like a URL because
139 # the UUID namespaces are supposed to work that way (even though
140 # the input data probably is never validated).
141
142 doc = xml.dom.minidom.parseString(
143 '<?xml version="1.0" encoding="utf-8"?>'
144 '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
145 '</Wix>'
146 )
147
148 # Assemble the install layout by directory. This makes it easier to
149 # emit XML, since each directory has separate entities.
150 manifest = collections.defaultdict(dict)
151
152 for root, dirs, files in os.walk(staging_dir):
153 dirs.sort()
154
155 root = pathlib.Path(root)
156 rel_dir = root.relative_to(staging_dir)
157
158 for i in range(len(rel_dir.parts)):
159 parent = '/'.join(rel_dir.parts[0 : i + 1])
160 manifest.setdefault(parent, {})
161
162 for f in sorted(files):
163 full = root / f
164 manifest[str(rel_dir).replace('\\', '/')][full.name] = full
165
166 component_groups = collections.defaultdict(list)
167
168 # Now emit a <Fragment> for each directory.
169 # Each directory is composed of a <DirectoryRef> pointing to its parent
170 # and defines child <Directory>'s and a <Component> with all the files.
171 for dir_name, entries in sorted(manifest.items()):
172 # The directory id is derived from the path. But the root directory
173 # is special.
174 if dir_name == '.':
175 parent_directory_id = 'INSTALLDIR'
176 else:
177 parent_directory_id = 'hg.dir.%s' % dir_name.replace(
178 '/', '.'
179 ).replace('-', '_')
180
181 fragment = doc.createElement('Fragment')
182 directory_ref = doc.createElement('DirectoryRef')
183 directory_ref.setAttribute('Id', parent_directory_id)
184
185 # Add <Directory> entries for immediate children directories.
186 for possible_child in sorted(manifest.keys()):
187 if (
188 dir_name == '.'
189 and '/' not in possible_child
190 and possible_child != '.'
191 ):
192 child_directory_id = ('hg.dir.%s' % possible_child).replace(
193 '-', '_'
194 )
195 name = possible_child
196 else:
197 if not possible_child.startswith('%s/' % dir_name):
198 continue
199 name = possible_child[len(dir_name) + 1 :]
200 if '/' in name:
201 continue
202
203 child_directory_id = 'hg.dir.%s' % possible_child.replace(
204 '/', '.'
205 ).replace('-', '_')
206
207 directory = doc.createElement('Directory')
208 directory.setAttribute('Id', child_directory_id)
209 directory.setAttribute('Name', name)
210 directory_ref.appendChild(directory)
211
212 # Add <Component>s for files in this directory.
213 for rel, source_path in sorted(entries.items()):
214 if dir_name == '.':
215 full_rel = rel
216 else:
217 full_rel = '%s/%s' % (dir_name, rel)
218
219 component_unique_id = (
220 'https://www.mercurial-scm.org/wix-installer/0/component/%s'
221 % full_rel
222 )
223 component_guid = uuid.uuid5(uuid.NAMESPACE_URL, component_unique_id)
224 component_id = 'hg.component.%s' % str(component_guid).replace(
225 '-', '_'
226 )
227
228 component = doc.createElement('Component')
229
230 component.setAttribute('Id', component_id)
231 component.setAttribute('Guid', str(component_guid).upper())
232 component.setAttribute('Win64', 'yes' if is_x64 else 'no')
233
234 # Assign this component to a top-level group.
235 if dir_name == '.':
236 component_groups['ROOT'].append(component_id)
237 elif '/' in dir_name:
238 component_groups[dir_name[0 : dir_name.index('/')]].append(
239 component_id
240 )
241 else:
242 component_groups[dir_name].append(component_id)
243
244 unique_id = (
245 'https://www.mercurial-scm.org/wix-installer/0/%s' % full_rel
246 )
247 file_guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id)
248
249 # IDs have length limits. So use GUID to derive them.
250 file_guid_normalized = str(file_guid).replace('-', '_')
251 file_id = 'hg.file.%s' % file_guid_normalized
252
253 file_element = doc.createElement('File')
254 file_element.setAttribute('Id', file_id)
255 file_element.setAttribute('Source', str(source_path))
256 file_element.setAttribute('KeyPath', 'yes')
257 file_element.setAttribute('ReadOnly', 'yes')
258
259 component.appendChild(file_element)
260 directory_ref.appendChild(component)
261
262 fragment.appendChild(directory_ref)
263 doc.documentElement.appendChild(fragment)
264
265 for group, component_ids in sorted(component_groups.items()):
266 fragment = doc.createElement('Fragment')
267 component_group = doc.createElement('ComponentGroup')
268 component_group.setAttribute('Id', 'hg.group.%s' % group)
269
270 for component_id in component_ids:
271 component_ref = doc.createElement('ComponentRef')
272 component_ref.setAttribute('Id', component_id)
273 component_group.appendChild(component_ref)
274
275 fragment.appendChild(component_group)
276 doc.documentElement.appendChild(fragment)
277
278 # Add <Shortcut> to files that have it defined.
279 for file_id, metadata in sorted(SHORTCUTS.items()):
280 els = doc.getElementsByTagName('File')
281 els = [el for el in els if el.getAttribute('Id') == file_id]
282
283 if not els:
284 raise Exception('could not find File[Id=%s]' % file_id)
285
286 for el in els:
287 shortcut = doc.createElement('Shortcut')
288 shortcut.setAttribute('Id', 'hg.shortcut.%s' % file_id)
289 shortcut.setAttribute('Directory', 'ProgramMenuDir')
290 shortcut.setAttribute('Icon', 'hgIcon.ico')
291 shortcut.setAttribute('IconIndex', '0')
292 shortcut.setAttribute('Advertise', 'yes')
293 for k, v in sorted(metadata.items()):
294 shortcut.setAttribute(k, v)
295
296 el.appendChild(shortcut)
297
298 return doc.toprettyxml()
299
300
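The deterministic ID scheme used above can be reproduced in isolation; a small sketch for a hypothetical staged file ``hg.exe``:

    import uuid

    full_rel = 'hg.exe'  # hypothetical path relative to the staging directory
    unique_id = (
        'https://www.mercurial-scm.org/wix-installer/0/component/%s' % full_rel
    )
    guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id)
    component_id = 'hg.component.%s' % str(guid).replace('-', '_')
    # the <Component> is emitted with Id=component_id and Guid=str(guid).upper()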
301 def build_installer_py2exe(
302 source_dir: pathlib.Path,
303 python_exe: pathlib.Path,
304 msi_name='mercurial',
305 version=None,
306 extra_packages_script=None,
307 extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
308 extra_features: typing.Optional[typing.List[str]] = None,
309 signing_info: typing.Optional[typing.Dict[str, str]] = None,
310 ):
311 """Build a WiX MSI installer using py2exe.
312
313 ``source_dir`` is the path to the Mercurial source tree to use.
314 ``arch`` is the target architecture, either ``x86`` or ``x64``.
315 ``python_exe`` is the path to the Python executable to use/bundle.
316 ``version`` is the Mercurial version string. If not defined,
317 ``mercurial/__version__.py`` will be consulted.
318 ``extra_packages_script`` is a command to be run to inject extra packages
319 into the py2exe binary. It should stage packages into the virtualenv and
320 print a null byte followed by a newline-separated list of packages that
321 should be included in the exe.
322 ``extra_wxs`` is a dict of {wxs_name: working_dir_for_wxs_build}.
323 ``extra_features`` is a list of additional named Features to include in
324 the build. These must match Feature names in one of the wxs scripts.
325 """
326 arch = 'x64' if r'\x64' in os.environ.get('LIB', '') else 'x86'
327
328 hg_build_dir = source_dir / 'build'
329
330 requirements_txt = (
331 source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt'
332 )
333
334 build_py2exe(
335 source_dir,
336 hg_build_dir,
337 python_exe,
338 'wix',
339 requirements_txt,
340 extra_packages=EXTRA_PACKAGES,
341 extra_packages_script=extra_packages_script,
342 extra_includes=EXTRA_INCLUDES,
343 )
344
345 build_dir = hg_build_dir / ('wix-%s' % arch)
346 staging_dir = build_dir / 'stage'
347
348 build_dir.mkdir(exist_ok=True)
349
350 # Purge the staging directory for every build so packaging is pristine.
351 if staging_dir.exists():
352 print('purging %s' % staging_dir)
353 shutil.rmtree(staging_dir)
354
355 stage_install(source_dir, staging_dir, lower_case=True)
356
357 # We also install some extra files.
358 process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
359
360 # And remove some files we don't want.
361 for f in STAGING_REMOVE_FILES:
362 p = staging_dir / f
363 if p.exists():
364 print('removing %s' % p)
365 p.unlink()
366
367 return run_wix_packaging(
368 source_dir,
369 build_dir,
370 staging_dir,
371 arch,
372 version=version,
373 python2=True,
374 msi_name=msi_name,
375 suffix="-python2",
376 extra_wxs=extra_wxs,
377 extra_features=extra_features,
378 signing_info=signing_info,
379 )
380
20
381
21
382 def build_installer_pyoxidizer(
22 def build_installer_pyoxidizer(
@@ -454,133 +94,3 b' def build_installer_pyoxidizer('
454 return {
94 return {
455 "msi_path": dist_path,
95 "msi_path": dist_path,
456 }
96 }
457
458
459 def run_wix_packaging(
460 source_dir: pathlib.Path,
461 build_dir: pathlib.Path,
462 staging_dir: pathlib.Path,
463 arch: str,
464 version: str,
465 python2: bool,
466 msi_name: typing.Optional[str] = "mercurial",
467 suffix: str = "",
468 extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
469 extra_features: typing.Optional[typing.List[str]] = None,
470 signing_info: typing.Optional[typing.Dict[str, str]] = None,
471 ):
472 """Invokes WiX to package up a built Mercurial.
473
474 ``signing_info`` is a dict defining properties to facilitate signing the
475 installer. Recognized keys include ``name``, ``subject_name``,
476 ``cert_path``, ``cert_password``, and ``timestamp_url``. If populated,
477 we will sign both the hg.exe and the .msi using the signing credentials
478 specified.
479 """
480
481 orig_version = version or find_version(source_dir)
482 version = normalize_windows_version(orig_version)
483 print('using version string: %s' % version)
484 if version != orig_version:
485 print('(normalized from: %s)' % orig_version)
486
487 if signing_info:
488 sign_with_signtool(
489 staging_dir / "hg.exe",
490 "%s %s" % (signing_info["name"], version),
491 subject_name=signing_info["subject_name"],
492 cert_path=signing_info["cert_path"],
493 cert_password=signing_info["cert_password"],
494 timestamp_url=signing_info["timestamp_url"],
495 )
496
497 wix_dir = source_dir / 'contrib' / 'packaging' / 'wix'
498
499 wix_pkg, wix_entry = download_entry('wix', build_dir)
500 wix_path = build_dir / ('wix-%s' % wix_entry['version'])
501
502 if not wix_path.exists():
503 extract_zip_to_directory(wix_pkg, wix_path)
504
505 if python2:
506 ensure_vc90_merge_modules(build_dir)
507
508 source_build_rel = pathlib.Path(os.path.relpath(source_dir, build_dir))
509
510 defines = {'Platform': arch}
511
512 # Derive a .wxs file with the staged files.
513 manifest_wxs = build_dir / 'stage.wxs'
514 with manifest_wxs.open('w', encoding='utf-8') as fh:
515 fh.write(make_files_xml(staging_dir, is_x64=arch == 'x64'))
516
517 run_candle(wix_path, build_dir, manifest_wxs, staging_dir, defines=defines)
518
519 for source, rel_path in sorted((extra_wxs or {}).items()):
520 run_candle(wix_path, build_dir, source, rel_path, defines=defines)
521
522 source = wix_dir / 'mercurial.wxs'
523 defines['Version'] = version
524 defines['Comments'] = 'Installs Mercurial version %s' % version
525
526 if python2:
527 defines["PythonVersion"] = "2"
528 defines['VCRedistSrcDir'] = str(build_dir)
529 else:
530 defines["PythonVersion"] = "3"
531
532 if (staging_dir / "lib").exists():
533 defines["MercurialHasLib"] = "1"
534
535 if extra_features:
536 assert all(';' not in f for f in extra_features)
537 defines['MercurialExtraFeatures'] = ';'.join(extra_features)
538
539 run_candle(wix_path, build_dir, source, source_build_rel, defines=defines)
540
541 msi_path = (
542 source_dir
543 / 'dist'
544 / ('%s-%s-%s%s.msi' % (msi_name, orig_version, arch, suffix))
545 )
546
547 args = [
548 str(wix_path / 'light.exe'),
549 '-nologo',
550 '-ext',
551 'WixUIExtension',
552 '-sw1076',
553 '-spdb',
554 '-o',
555 str(msi_path),
556 ]
557
558 for source, rel_path in sorted((extra_wxs or {}).items()):
559 assert source.endswith('.wxs')
560 source = os.path.basename(source)
561 args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
562
563 args.extend(
564 [
565 str(build_dir / 'stage.wixobj'),
566 str(build_dir / 'mercurial.wixobj'),
567 ]
568 )
569
570 subprocess.run(args, cwd=str(source_dir), check=True)
571
572 print('%s created' % msi_path)
573
574 if signing_info:
575 sign_with_signtool(
576 msi_path,
577 "%s %s" % (signing_info["name"], version),
578 subject_name=signing_info["subject_name"],
579 cert_path=signing_info["cert_path"],
580 cert_password=signing_info["cert_password"],
581 timestamp_url=signing_info["timestamp_url"],
582 )
583
584 return {
585 'msi_path': msi_path,
586 }
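A hedged sketch of the ``signing_info`` mapping described in the docstring above (every value here is a placeholder); when populated, both ``hg.exe`` and the resulting ``.msi`` are signed with it:

    signing_info = {
        'name': 'Mercurial',
        'subject_name': None,                    # use a certificate file instead
        'cert_path': r'c:\certs\codesign.pfx',   # placeholder path
        'cert_password': None,                   # prompted for when empty
        'timestamp_url': 'http://timestamp.example/rfc3161',
    }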
@@ -5,52 +5,35 b' Building the Inno installer requires a W'
5
5
6 The following system dependencies must be installed:
6 The following system dependencies must be installed:
7
7
8 * Python 2.7 (download from https://www.python.org/downloads/)
9 * Microsoft Visual C++ Compiler for Python 2.7
10 (https://www.microsoft.com/en-us/download/details.aspx?id=44266)
11 * Inno Setup (http://jrsoftware.org/isdl.php) version 5.4 or newer.
8 * Inno Setup (http://jrsoftware.org/isdl.php) version 5.4 or newer.
12 Be sure to install the optional Inno Setup Preprocessor feature,
9 Be sure to install the optional Inno Setup Preprocessor feature,
13 which is required.
10 which is required.
14 * Python 3.5+ (to run the ``packaging.py`` script)
11 * Python 3.6+ (to run the ``packaging.py`` script)
15
12
16 Building
13 Building
17 ========
14 ========
18
15
19 The ``packaging.py`` script automates the process of producing an
16 The ``packaging.py`` script automates the process of producing an Inno
20 Inno installer. It manages fetching and configuring the
17 installer. It manages fetching and configuring non-system dependencies
21 non-system dependencies (such as py2exe, gettext, and various
18 (such as gettext, and various Python packages). It can be run from a
22 Python packages).
19 basic cmd.exe window (i.e. activating the MSBuildTools environment is
23
20 not required).
24 The script requires an activated ``Visual C++ 2008`` command prompt.
25 A shortcut to such a prompt was installed with ``Microsoft Visual C++
26 Compiler for Python 2.7``. From your Start Menu, look for
27 ``Microsoft Visual C++ Compiler Package for Python 2.7`` then launch
28 either ``Visual C++ 2008 32-bit Command Prompt`` or
29 ``Visual C++ 2008 64-bit Command Prompt``.
30
21
31 From the prompt, change to the Mercurial source directory. e.g.
22 From the prompt, change to the Mercurial source directory. e.g.
32 ``cd c:\src\hg``.
23 ``cd c:\src\hg``.
33
24
34 Next, invoke ``packaging.py`` to produce an Inno installer. You will
25 Next, invoke ``packaging.py`` to produce an Inno installer.::
35 need to supply the path to the Python interpreter to use.::
36
26
37 $ py -3 contrib\packaging\packaging.py \
27 $ py -3 contrib\packaging\packaging.py \
38 inno --python c:\python27\python.exe
28 inno --pyoxidizer-target x86_64-pc-windows-msvc
39
40 .. note::
41
42 The script validates that the Visual C++ environment is
43 active and that the architecture of the specified Python
44 interpreter matches the Visual C++ environment and errors
45 if not.
46
29
47 If everything runs as intended, dependencies will be fetched and
30 If everything runs as intended, dependencies will be fetched and
48 configured into the ``build`` sub-directory, Mercurial will be built,
31 configured into the ``build`` sub-directory, Mercurial will be built,
49 and an installer placed in the ``dist`` sub-directory. The final
32 and an installer placed in the ``dist`` sub-directory. The final line
50 line of output should print the name of the generated installer.
33 of output should print the name of the generated installer.
51
34
52 Additional options may be configured. Run
35 Additional options may be configured. Run ``packaging.py inno --help``
53 ``packaging.py inno --help`` to see a list of program flags.
36 to see a list of program flags.
54
37
55 MinGW
38 MinGW
56 =====
39 =====
@@ -126,14 +126,6 b' install -m 755 contrib/chg/chg $RPM_BUIL'
126 install -m 755 contrib/hgk $RPM_BUILD_ROOT%{_bindir}/
126 install -m 755 contrib/hgk $RPM_BUILD_ROOT%{_bindir}/
127 install -m 755 contrib/hg-ssh $RPM_BUILD_ROOT%{_bindir}/
127 install -m 755 contrib/hg-ssh $RPM_BUILD_ROOT%{_bindir}/
128
128
129 bash_completion_dir=$RPM_BUILD_ROOT%{_sysconfdir}/bash_completion.d
130 mkdir -p $bash_completion_dir
131 install -m 644 contrib/bash_completion $bash_completion_dir/mercurial.sh
132
133 zsh_completion_dir=$RPM_BUILD_ROOT%{_datadir}/zsh/site-functions
134 mkdir -p $zsh_completion_dir
135 install -m 644 contrib/zsh_completion $zsh_completion_dir/_mercurial
136
137 mkdir -p $RPM_BUILD_ROOT%{emacs_lispdir}
129 mkdir -p $RPM_BUILD_ROOT%{emacs_lispdir}
138 install -m 644 contrib/mercurial.el $RPM_BUILD_ROOT%{emacs_lispdir}/
130 install -m 644 contrib/mercurial.el $RPM_BUILD_ROOT%{emacs_lispdir}/
139 install -m 644 contrib/mq.el $RPM_BUILD_ROOT%{emacs_lispdir}/
131 install -m 644 contrib/mq.el $RPM_BUILD_ROOT%{emacs_lispdir}/
@@ -148,9 +140,12 b' rm -rf $RPM_BUILD_ROOT'
148 %doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html *.cgi contrib/*.fcgi contrib/*.wsgi
140 %doc CONTRIBUTORS COPYING doc/README doc/hg*.txt doc/hg*.html *.cgi contrib/*.fcgi contrib/*.wsgi
149 %doc %attr(644,root,root) %{_mandir}/man?/hg*
141 %doc %attr(644,root,root) %{_mandir}/man?/hg*
150 %doc %attr(644,root,root) contrib/*.svg
142 %doc %attr(644,root,root) contrib/*.svg
143 %dir %{_datadir}/bash-completion/
144 %dir %{_datadir}/bash-completion/completions
145 %{_datadir}/bash-completion/completions/hg
151 %dir %{_datadir}/zsh/
146 %dir %{_datadir}/zsh/
152 %dir %{_datadir}/zsh/site-functions/
147 %dir %{_datadir}/zsh/site-functions/
153 %{_datadir}/zsh/site-functions/_mercurial
148 %{_datadir}/zsh/site-functions/_hg
154 %dir %{_datadir}/emacs/site-lisp/
149 %dir %{_datadir}/emacs/site-lisp/
155 %{_datadir}/emacs/site-lisp/mercurial.el
150 %{_datadir}/emacs/site-lisp/mercurial.el
156 %{_datadir}/emacs/site-lisp/mq.el
151 %{_datadir}/emacs/site-lisp/mq.el
@@ -158,8 +153,6 b' rm -rf $RPM_BUILD_ROOT'
158 %{_bindir}/chg
153 %{_bindir}/chg
159 %{_bindir}/hgk
154 %{_bindir}/hgk
160 %{_bindir}/hg-ssh
155 %{_bindir}/hg-ssh
161 %dir %{_sysconfdir}/bash_completion.d/
162 %config(noreplace) %{_sysconfdir}/bash_completion.d/mercurial.sh
163 %dir %{_sysconfdir}/mercurial
156 %dir %{_sysconfdir}/mercurial
164 %dir %{_sysconfdir}/mercurial/hgrc.d
157 %dir %{_sysconfdir}/mercurial/hgrc.d
165 %if "%{?withpython}"
158 %if "%{?withpython}"
@@ -1,13 +1,11 b''
1 docutils
1 docutils
2 # Pinned to an old version because 0.20 drops Python 2 compatibility.
2 dulwich
3 dulwich < 0.20 ; python_version <= '2.7'
4 dulwich ; python_version >= '3'
5
3
6 # Needed by the release note tooling
4 # Needed by the release note tooling
7 fuzzywuzzy
5 fuzzywuzzy
8
6
9 keyring
7 keyring
10 pygit2 ; python_version >= '3'
8 pygit2
11 pygments
9 pygments
12
10
13 # Needed by the phabricator tests
11 # Needed by the phabricator tests
@@ -33,8 +33,8 b''
33 CompressionLevel='high' />
33 CompressionLevel='high' />
34 <Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" />
34 <Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" />
35
35
36 <Condition Message='Mercurial MSI installers require Windows XP or higher'>
36 <Condition Message='Mercurial MSI installers require Windows 8.1 or higher'>
37 VersionNT >= 501
37 VersionNT >= 603
38 </Condition>
38 </Condition>
39
39
40 <Property Id="INSTALLDIR">
40 <Property Id="INSTALLDIR">
@@ -79,23 +79,6 b''
79 </Component>
79 </Component>
80 </Directory>
80 </Directory>
81 </Directory>
81 </Directory>
82
83 <!-- Install VCRedist merge modules on Python 2. On Python 3,
84 vcruntimeXXX.dll is part of the install layout and gets picked up
85 as a regular file. -->
86 <?if $(var.PythonVersion) = "2" ?>
87 <?if $(var.Platform) = "x86" ?>
88 <Merge Id='VCRuntime' DiskId='1' Language='1033'
89 SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x86_msm.msm' />
90 <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033'
91 SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x86_msm.msm' />
92 <?else?>
93 <Merge Id='VCRuntime' DiskId='1' Language='1033'
94 SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x64_msm.msm' />
95 <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033'
96 SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x64_msm.msm' />
97 <?endif?>
98 <?endif?>
99 </Directory>
82 </Directory>
100
83
101 <Feature Id='Complete' Title='Mercurial' Description='The complete package'
84 <Feature Id='Complete' Title='Mercurial' Description='The complete package'
@@ -111,10 +94,6 b''
111 <ComponentGroupRef Id="hg.group.lib" />
94 <ComponentGroupRef Id="hg.group.lib" />
112 <?endif?>
95 <?endif?>
113 <ComponentGroupRef Id="hg.group.templates" />
96 <ComponentGroupRef Id="hg.group.templates" />
114 <?if $(var.PythonVersion) = "2" ?>
115 <MergeRef Id='VCRuntime' />
116 <MergeRef Id='VCRuntimePolicy' />
117 <?endif?>
118 </Feature>
97 </Feature>
119 <?ifdef MercurialExtraFeatures?>
98 <?ifdef MercurialExtraFeatures?>
120 <?foreach EXTRAFEAT in $(var.MercurialExtraFeatures)?>
99 <?foreach EXTRAFEAT in $(var.MercurialExtraFeatures)?>
@@ -12,50 +12,36 b' that do not have this requirement.'
12 Requirements
12 Requirements
13 ============
13 ============
14
14
15 Building the WiX installers requires a Windows machine. The following
15 Building the WiX installer requires a Windows machine.
16 dependencies must be installed:
17
16
18 * Python 2.7 (download from https://www.python.org/downloads/)
17 The following system dependencies must be installed:
19 * Microsoft Visual C++ Compiler for Python 2.7
18
20 (https://www.microsoft.com/en-us/download/details.aspx?id=44266)
19 * Python 3.6+ (to run the ``packaging.py`` script)
21 * Python 3.5+ (to run the ``packaging.py`` script)
22
20
23 Building
21 Building
24 ========
22 ========
25
23
26 The ``packaging.py`` script automates the process of producing an MSI
24 The ``packaging.py`` script automates the process of producing an MSI
27 installer. It manages fetching and configuring non-system dependencies
25 installer. It manages fetching and configuring non-system dependencies
28 (such as py2exe, gettext, and various Python packages).
26 (such as gettext, and various Python packages). It can be run from a
29
27 basic cmd.exe window (i.e. activating the MSBuildTools environment is
30 The script requires an activated ``Visual C++ 2008`` command prompt.
28 not required).
31 A shortcut to such a prompt was installed with ``Microsoft Visual
32 C++ Compiler for Python 2.7``. From your Start Menu, look for
33 ``Microsoft Visual C++ Compiler Package for Python 2.7`` then
34 launch either ``Visual C++ 2008 32-bit Command Prompt`` or
35 ``Visual C++ 2008 64-bit Command Prompt``.
36
29
37 From the prompt, change to the Mercurial source directory. e.g.
30 From the prompt, change to the Mercurial source directory. e.g.
38 ``cd c:\src\hg``.
31 ``cd c:\src\hg``.
39
32
40 Next, invoke ``packaging.py`` to produce an MSI installer. You will need
33 Next, invoke ``packaging.py`` to produce an MSI installer.::
41 to supply the path to the Python interpreter to use.::
42
34
43 $ py -3 contrib\packaging\packaging.py \
35 $ py -3 contrib\packaging\packaging.py \
44 wix --python c:\python27\python.exe
36 wix --pyoxidizer-target x86_64-pc-windows-msvc
45
46 .. note::
47
48 The script validates that the Visual C++ environment is active and
49 that the architecture of the specified Python interpreter matches the
50 Visual C++ environment. An error is raised otherwise.
51
37
52 If everything runs as intended, dependencies will be fetched and
38 If everything runs as intended, dependencies will be fetched and
53 configured into the ``build`` sub-directory, Mercurial will be built,
39 configured into the ``build`` sub-directory, Mercurial will be built,
54 and an installer placed in the ``dist`` sub-directory. The final line
40 and an installer placed in the ``dist`` sub-directory. The final line
55 of output should print the name of the generated installer.
41 of output should print the name of the generated installer.
56
42
57 Additional options may be configured. Run ``packaging.py wix --help`` to
43 Additional options may be configured. Run ``packaging.py wix --help``
58 see a list of program flags.
44 to see a list of program flags.
59
45
60 Relationship to TortoiseHG
46 Relationship to TortoiseHG
61 ==========================
47 ==========================
@@ -63,7 +49,7 b' Relationship to TortoiseHG'
63 TortoiseHG uses the WiX files in this directory.
49 TortoiseHG uses the WiX files in this directory.
64
50
65 The code for building TortoiseHG installers lives at
51 The code for building TortoiseHG installers lives at
66 https://bitbucket.org/tortoisehg/thg-winbuild and is maintained by
52 https://foss.heptapod.net/mercurial/tortoisehg/thg-winbuild and is maintained by
67 Steve Borho (steve@borho.org).
53 Steve Borho (steve@borho.org).
68
54
69 When changing behavior of the WiX installer, be sure to notify
55 When changing behavior of the WiX installer, be sure to notify
@@ -9,7 +9,6 b''
9 # various plots related to write performance in a revlog
9 # various plots related to write performance in a revlog
10 #
10 #
11 # usage: perf-revlog-write-plot.py details.json
11 # usage: perf-revlog-write-plot.py details.json
12 from __future__ import absolute_import, print_function
13 import json
12 import json
14 import re
13 import re
15
14
@@ -6,7 +6,6 b''
6 # This uses a subsetmaker extension (next to this script) to generate a stream of
6 # This uses a subsetmaker extension (next to this script) to generate a stream of
7 # random discovery instances. When interesting cases are discovered, information
7 # random discovery instances. When interesting cases are discovered, information
8 # about them is printed to stdout.
8 # about them is printed to stdout.
9 from __future__ import print_function
10
9
11 import json
10 import json
12 import os
11 import os
@@ -143,18 +142,35 b' def interesting_boundary(res):'
143
142
144 Ideally, we would make this configurable, but this is not a focus for now
143 Ideally, we would make this configurable, but this is not a focus for now
145
144
146 return None or (round-trip, undecided-common, undecided-missing)
145 return None or (
146 round-trip,
147 undecided-common,
148 undecided-missing,
149 total-revs,
150 common-revs,
151 missing-revs,
152 )
147 """
153 """
148 roundtrips = res["total-roundtrips"]
154 roundtrips = res["total-roundtrips"]
149 if roundtrips <= 1:
155 if roundtrips <= 1:
150 return None
156 return None
157 total_revs = res["nb-revs"]
158 common_revs = res["nb-revs-common"]
159 missing_revs = res["nb-revs-missing"]
151 undecided_common = res["nb-ini_und-common"]
160 undecided_common = res["nb-ini_und-common"]
152 undecided_missing = res["nb-ini_und-missing"]
161 undecided_missing = res["nb-ini_und-missing"]
153 if undecided_common == 0:
162 if undecided_common == 0:
154 return None
163 return None
155 if undecided_missing == 0:
164 if undecided_missing == 0:
156 return None
165 return None
157 return (roundtrips, undecided_common, undecided_missing)
166 return (
167 roundtrips,
168 undecided_common,
169 undecided_missing,
170 total_revs,
171 common_revs,
172 missing_revs,
173 )
158
174
159
175
160 def end(*args, **kwargs):
176 def end(*args, **kwargs):
@@ -15,6 +15,10 b' from mercurial import ('
15 smartset,
15 smartset,
16 )
16 )
17
17
18 import sortedcontainers
19
20 SortedSet = sortedcontainers.SortedSet
21
18 revsetpredicate = registrar.revsetpredicate()
22 revsetpredicate = registrar.revsetpredicate()
19
23
20
24
@@ -78,7 +82,7 b' def scratch(repo, subset, x):'
78 n = revsetlang.getinteger(n, _(b"scratch expects a number"))
82 n = revsetlang.getinteger(n, _(b"scratch expects a number"))
79
83
80 selected = set()
84 selected = set()
81 heads = set()
85 heads = SortedSet()
82 children_count = collections.defaultdict(lambda: 0)
86 children_count = collections.defaultdict(lambda: 0)
83 parents = repo.changelog._uncheckedparentrevs
87 parents = repo.changelog._uncheckedparentrevs
84
88
@@ -102,7 +106,7 b' def scratch(repo, subset, x):'
102 for x in range(n):
106 for x in range(n):
103 if not heads:
107 if not heads:
104 break
108 break
105 pick = rand.choice(list(heads))
109 pick = rand.choice(heads)
106 heads.remove(pick)
110 heads.remove(pick)
107 assert pick not in selected
111 assert pick not in selected
108 selected.add(pick)
112 selected.add(pick)
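The switch from ``set()`` to ``SortedSet()`` above is what allows ``rand.choice(heads)`` without the ``list(...)`` copy; a standalone sketch of the idea (the values and seed are illustrative):

    import random

    import sortedcontainers

    rand = random.Random(42)
    heads = sortedcontainers.SortedSet([5, 2, 9])
    # SortedSet is indexable and iterates in sorted order, so random.choice()
    # works directly and draws are reproducible for a given seed; a plain
    # set() would first need copying into a list, whose ordering is an
    # implementation detail.
    pick = rand.choice(heads)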
@@ -155,16 +159,44 b' def antichain(repo, subset, x):'
155 else:
159 else:
156 assert False
160 assert False
157
161
158 selected = set()
162 cl = repo.changelog
159
163
160 baseset = revset.getset(repo, smartset.fullreposet(repo), x)
164 # We already have cheap access to the parent mapping.
161 undecided = baseset
165 # However, we need to build the children mapping ourselves
166 parents = repo.changelog._uncheckedparentrevs
167 children_map = collections.defaultdict(list)
168 for r in cl:
169 p1, p2 = parents(r)
170 if p1 >= 0:
171 children_map[p1].append(r)
172 if p2 >= 0:
173 children_map[p2].append(r)
174 children = children_map.__getitem__
175
176 selected = set()
177 undecided = SortedSet(cl)
162
178
163 while undecided:
179 while undecided:
164 pick = rand.choice(list(undecided))
180 # while there is "undecided content", we pick a random changeset X
181 # and we remove anything in `::X + X::` from undecided content
182 pick = rand.choice(undecided)
165 selected.add(pick)
183 selected.add(pick)
166 undecided = repo.revs(
184 undecided.remove(pick)
167 '%ld and not (::%ld or %ld::head())', baseset, selected, selected
185
168 )
186 ancestors = set(p for p in parents(pick) if p in undecided)
187 descendants = set(c for c in children(pick) if c in undecided)
188
189 while ancestors:
190 current = ancestors.pop()
191 undecided.remove(current)
192 for p in parents(current):
193 if p in undecided:
194 ancestors.add(p)
195 while descendants:
196 current = descendants.pop()
197 undecided.remove(current)
198 for p in children(current):
199 if p in undecided:
200 descendants.add(p)
169
201
170 return smartset.baseset(selected) & subset
202 return smartset.baseset(selected) & subset
@@ -54,7 +54,6 b' Configurations'
54 # - make perf command for recent feature work correctly with early
54 # - make perf command for recent feature work correctly with early
55 # Mercurial
55 # Mercurial
56
56
57 from __future__ import absolute_import
58 import contextlib
57 import contextlib
59 import functools
58 import functools
60 import gc
59 import gc
@@ -370,7 +369,7 b' def getlen(ui):'
370 return len
369 return len
371
370
372
371
373 class noop(object):
372 class noop:
374 """dummy context manager"""
373 """dummy context manager"""
375
374
376 def __enter__(self):
375 def __enter__(self):
@@ -414,7 +413,7 b' def gettimer(ui, opts=None):'
414 # available since 2.2 (or ae5f92e154d3)
413 # available since 2.2 (or ae5f92e154d3)
415 from mercurial import node
414 from mercurial import node
416
415
417 class defaultformatter(object):
416 class defaultformatter:
418 """Minimized composition of baseformatter and plainformatter"""
417 """Minimized composition of baseformatter and plainformatter"""
419
418
420 def __init__(self, ui, topic, opts):
419 def __init__(self, ui, topic, opts):
@@ -653,7 +652,7 b' def safeattrsetter(obj, name, ignoremiss'
653
652
654 origvalue = getattr(obj, _sysstr(name))
653 origvalue = getattr(obj, _sysstr(name))
655
654
656 class attrutil(object):
655 class attrutil:
657 def set(self, newvalue):
656 def set(self, newvalue):
658 setattr(obj, _sysstr(name), newvalue)
657 setattr(obj, _sysstr(name), newvalue)
659
658
@@ -2943,7 +2942,7 b' def perfrevlogwrite(ui, repo, file_=None'
2943 fm.end()
2942 fm.end()
2944
2943
2945
2944
2946 class _faketr(object):
2945 class _faketr:
2947 def add(s, x, y, z=None):
2946 def add(s, x, y, z=None):
2948 return None
2947 return None
2949
2948
@@ -3,7 +3,6 b''
3 # A small script to automatically reject idle Diffs
3 # A small script to automatically reject idle Diffs
4 #
4 #
5 # you need to set the PHABBOT_USER and PHABBOT_TOKEN environment variable for authentication
5 # you need to set the PHABBOT_USER and PHABBOT_TOKEN environment variable for authentication
6 from __future__ import absolute_import, print_function
7
6
8 import datetime
7 import datetime
9 import os
8 import os
@@ -1,7 +1,6 b''
1 '''
1 '''
2 Examples of useful python hooks for Mercurial.
2 Examples of useful python hooks for Mercurial.
3 '''
3 '''
4 from __future__ import absolute_import
5 from mercurial import (
4 from mercurial import (
6 patch,
5 patch,
7 util,
6 util,
@@ -4,7 +4,6 b''
4 # This software may be modified and distributed under the terms
4 # This software may be modified and distributed under the terms
5 # of the BSD license. See the LICENSE file for details.
5 # of the BSD license. See the LICENSE file for details.
6
6
7 from __future__ import absolute_import
8
7
9 import cffi
8 import cffi
10 import distutils.ccompiler
9 import distutils.ccompiler
@@ -5,7 +5,6 b''
5 # This software may be modified and distributed under the terms
5 # This software may be modified and distributed under the terms
6 # of the BSD license. See the LICENSE file for details.
6 # of the BSD license. See the LICENSE file for details.
7
7
8 from __future__ import print_function
9
8
10 from distutils.version import LooseVersion
9 from distutils.version import LooseVersion
11 import os
10 import os
@@ -1,5 +1,3 b''
1 from __future__ import unicode_literals
2
3 import unittest
1 import unittest
4
2
5 import zstandard as zstd
3 import zstandard as zstd
@@ -6,7 +6,6 b''
6
6
7 """Python interface to the Zstandard (zstd) compression library."""
7 """Python interface to the Zstandard (zstd) compression library."""
8
8
9 from __future__ import absolute_import, unicode_literals
10
9
11 # This module serves 2 roles:
10 # This module serves 2 roles:
12 #
11 #
@@ -6,7 +6,6 b''
6
6
7 """Python interface to the Zstandard (zstd) compression library."""
7 """Python interface to the Zstandard (zstd) compression library."""
8
8
9 from __future__ import absolute_import, unicode_literals
10
9
11 # This should match what the C extension exports.
10 # This should match what the C extension exports.
12 __all__ = [
11 __all__ = [
@@ -15,8 +15,6 b' You probably want to run it like this:'
15 $ python3 ../contrib/python3-ratchet.py \
15 $ python3 ../contrib/python3-ratchet.py \
16 > --working-tests=../contrib/python3-whitelist
16 > --working-tests=../contrib/python3-whitelist
17 """
17 """
18 from __future__ import print_function
19 from __future__ import absolute_import
20
18
21 import argparse
19 import argparse
22 import json
20 import json
@@ -8,7 +8,6 b''
8 #
8 #
9 # call with --help for details
9 # call with --help for details
10
10
11 from __future__ import absolute_import, print_function
12 import math
11 import math
13 import optparse # cannot use argparse, python 2.7 only
12 import optparse # cannot use argparse, python 2.7 only
14 import os
13 import os
@@ -4,7 +4,6 b''
4 r"""dump stack trace when receiving SIGQUIT (Ctrl-\) or SIGINFO (Ctrl-T on BSDs)
4 r"""dump stack trace when receiving SIGQUIT (Ctrl-\) or SIGINFO (Ctrl-T on BSDs)
5 """
5 """
6
6
7 from __future__ import absolute_import, print_function
8 import signal
7 import signal
9 import sys
8 import sys
10 import traceback
9 import traceback
@@ -1,5 +1,4 b''
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2 from __future__ import absolute_import
3
2
4 import getopt
3 import getopt
5 import sys
4 import sys
@@ -36,7 +36,6 b' A few obvious properties that are not cu'
36 - Symlinks and binary files are ignored
36 - Symlinks and binary files are ignored
37 '''
37 '''
38
38
39 from __future__ import absolute_import
40 import bisect
39 import bisect
41 import collections
40 import collections
42 import itertools
41 import itertools
@@ -213,7 +212,7 b' def analyze(ui, repo, *revs, **opts):'
213 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
212 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
214 if isbin:
213 if isbin:
215 continue
214 continue
216 added = sum(pycompat.itervalues(lineadd), 0)
215 added = sum(lineadd.values(), 0)
217 if mar == 'm':
216 if mar == 'm':
218 if added and lineremove:
217 if added and lineremove:
219 lineschanged[
218 lineschanged[
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import, print_function
9
8
10 import abc
9 import abc
11 import re
10 import re
@@ -80,7 +79,7 b' def writeerr(data):'
80 ####################
79 ####################
81
80
82
81
83 class embeddedmatcher(object): # pytype: disable=ignored-metaclass
82 class embeddedmatcher: # pytype: disable=ignored-metaclass
84 """Base class to detect embedded code fragments in *.t test script"""
83 """Base class to detect embedded code fragments in *.t test script"""
85
84
86 __metaclass__ = abc.ABCMeta
85 __metaclass__ = abc.ABCMeta
@@ -157,7 +156,7 b' def embedded(basefile, lines, errors, ma'
157 :ends: line number (1-origin), at which embedded code ends (exclusive)
156 :ends: line number (1-origin), at which embedded code ends (exclusive)
158 :code: extracted embedded code, which is single-stringified
157 :code: extracted embedded code, which is single-stringified
159
158
160 >>> class ambigmatcher(object):
159 >>> class ambigmatcher:
161 ... # mock matcher class to examine implementation of
160 ... # mock matcher class to examine implementation of
162 ... # "ambiguous matching" corner case
161 ... # "ambiguous matching" corner case
163 ... def __init__(self, desc, matchfunc):
162 ... def __init__(self, desc, matchfunc):
@@ -3,7 +3,6 b''
3 # $ hg init
3 # $ hg init
4 # $ undumprevlog < repo.dump
4 # $ undumprevlog < repo.dump
5
5
6 from __future__ import absolute_import, print_function
7
6
8 import sys
7 import sys
9 from mercurial.node import bin
8 from mercurial.node import bin
@@ -78,7 +78,6 b''
78 # - Restart the web server and see if things are running.
78 # - Restart the web server and see if things are running.
79 #
79 #
80
80
81 from __future__ import absolute_import
82
81
83 # Configuration file location
82 # Configuration file location
84 hgweb_config = r'c:\your\directory\wsgi.config'
83 hgweb_config = r'c:\your\directory\wsgi.config'
@@ -5,7 +5,7 b''
5 ; This file will be replaced by the installer on every upgrade.
5 ; This file will be replaced by the installer on every upgrade.
6 ; Editing this file can cause strange side effects on Vista.
6 ; Editing this file can cause strange side effects on Vista.
7 ;
7 ;
8 ; http://bitbucket.org/tortoisehg/stable/issue/135
8 ; https://foss.heptapod.net/mercurial/tortoisehg/thg/-/issues/135
9 ;
9 ;
10 ; To change settings you see in this file, override (or enable) them in
10 ; To change settings you see in this file, override (or enable) them in
11 ; your user Mercurial.ini file, where USERNAME is your Windows user name:
11 ; your user Mercurial.ini file, where USERNAME is your Windows user name:
@@ -2,7 +2,6 b''
2 #
2 #
3 # checkseclevel - checking section title levels in each online help document
3 # checkseclevel - checking section title levels in each online help document
4
4
5 from __future__ import absolute_import
6
5
7 import optparse
6 import optparse
8 import os
7 import os
@@ -7,7 +7,6 b''
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import, print_function
11
10
12 import os
11 import os
13 import re
12 import re
@@ -4,7 +4,6 b''
4 where DOC is the name of a document
4 where DOC is the name of a document
5 """
5 """
6
6
7 from __future__ import absolute_import
8
7
9 import os
8 import os
10 import sys
9 import sys
@@ -41,7 +41,6 b' A unix-like system keeps an index of the'
41 by the command whatis or apropos.
41 by the command whatis or apropos.
42
42
43 """
43 """
44 from __future__ import absolute_import
45
44
46 __docformat__ = 'reStructuredText'
45 __docformat__ = 'reStructuredText'
47
46
@@ -113,7 +112,7 b' class Writer(writers.Writer):'
113 self.output = visitor.astext()
112 self.output = visitor.astext()
114
113
115
114
116 class Table(object):
115 class Table:
117 def __init__(self):
116 def __init__(self):
118 self._rows = []
117 self._rows = []
119 self._options = ['center']
118 self._options = ['center']
@@ -313,7 +312,7 b' class Translator(nodes.NodeVisitor):'
313 pass
312 pass
314
313
315 def list_start(self, node):
314 def list_start(self, node):
316 class enum_char(object):
315 class enum_char:
317 enum_style = {
316 enum_style = {
318 'bullet': '\\(bu',
317 'bullet': '\\(bu',
319 'emdash': '\\(em',
318 'emdash': '\\(em',
@@ -12,7 +12,6 b''
12 where WRITER is the name of a Docutils writer such as 'html' or 'manpage'
12 where WRITER is the name of a Docutils writer such as 'html' or 'manpage'
13 """
13 """
14
14
15 from __future__ import absolute_import
16
15
17 import sys
16 import sys
18
17
@@ -6,7 +6,6 b''
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9 from __future__ import absolute_import
10
9
11 import os
10 import os
12 import sys
11 import sys
@@ -44,10 +43,9 b' from hgdemandimport import tracing'
44 with tracing.log('hg script'):
43 with tracing.log('hg script'):
45 # enable importing on demand to reduce startup time
44 # enable importing on demand to reduce startup time
46 try:
45 try:
47 if sys.version_info[0] < 3 or sys.version_info >= (3, 6):
46 import hgdemandimport
48 import hgdemandimport
49
47
50 hgdemandimport.enable()
48 hgdemandimport.enable()
51 except ImportError:
49 except ImportError:
52 sys.stderr.write(
50 sys.stderr.write(
53 "abort: couldn't find mercurial libraries in [%s]\n"
51 "abort: couldn't find mercurial libraries in [%s]\n"
@@ -11,15 +11,11 b''
11 # demand loading is per-package. Keeping demandimport in the mercurial package
11 # demand loading is per-package. Keeping demandimport in the mercurial package
12 # would disable demand loading for any modules in mercurial.
12 # would disable demand loading for any modules in mercurial.
13
13
14 from __future__ import absolute_import
15
14
16 import os
15 import os
17 import sys
16 import sys
18
17
19 if sys.version_info[0] >= 3:
18 from . import demandimportpy3 as demandimport
20 from . import demandimportpy3 as demandimport
21 else:
22 from . import demandimportpy2 as demandimport
23
19
24 # Full module names which can't be lazy imported.
20 # Full module names which can't be lazy imported.
25 # Extensions can add to this set.
21 # Extensions can add to this set.
@@ -24,7 +24,6 b' This also has some limitations compared '
24 """
24 """
25
25
26 # This line is unnecessary, but it satisfies test-check-py3-compat.t.
26 # This line is unnecessary, but it satisfies test-check-py3-compat.t.
27 from __future__ import absolute_import
28
27
29 import contextlib
28 import contextlib
30 import importlib.util
29 import importlib.util
@@ -34,12 +33,6 b' from . import tracing'
34
33
35 _deactivated = False
34 _deactivated = False
36
35
37 # Python 3.5's LazyLoader doesn't work for some reason.
38 # https://bugs.python.org/issue26186 is a known issue with extension
39 # importing. But it appears to not have a meaningful effect with
40 # Mercurial.
41 _supported = sys.version_info[0:2] >= (3, 6)
42
43
36
44 class _lazyloaderex(importlib.util.LazyLoader):
37 class _lazyloaderex(importlib.util.LazyLoader):
45 """This is a LazyLoader except it also follows the _deactivated global and
38 """This is a LazyLoader except it also follows the _deactivated global and
@@ -55,7 +48,7 b' class _lazyloaderex(importlib.util.LazyL'
55 super().exec_module(module)
48 super().exec_module(module)
56
49
57
50
58 class LazyFinder(object):
51 class LazyFinder:
59 """A wrapper around a ``MetaPathFinder`` that makes loaders lazy.
52 """A wrapper around a ``MetaPathFinder`` that makes loaders lazy.
60
53
61 ``sys.meta_path`` finders have their ``find_spec()`` called to locate a
54 ``sys.meta_path`` finders have their ``find_spec()`` called to locate a
@@ -145,9 +138,6 b' def disable():'
145
138
146
139
147 def enable():
140 def enable():
148 if not _supported:
149 return
150
151 new_finders = []
141 new_finders = []
152 for finder in sys.meta_path:
142 for finder in sys.meta_path:
153 new_finders.append(
143 new_finders.append(
@@ -5,7 +5,6 b''
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 from __future__ import absolute_import
9
8
10 import contextlib
9 import contextlib
11 import os
10 import os
@@ -1,4 +1,3 b''
1 from __future__ import absolute_import
2 import pkgutil
1 import pkgutil
3
2
4 __path__ = pkgutil.extend_path(__path__, __name__)
3 __path__ = pkgutil.extend_path(__path__, __name__)
@@ -31,7 +31,6 b' amend modified chunks into the correspon'
31 # * Converge getdraftstack() with other code in core
31 # * Converge getdraftstack() with other code in core
32 # * move many attributes on fixupstate to be private
32 # * move many attributes on fixupstate to be private
33
33
34 from __future__ import absolute_import
35
34
36 import collections
35 import collections
37
36
@@ -84,7 +83,7 b' colortable = {'
84 defaultdict = collections.defaultdict
83 defaultdict = collections.defaultdict
85
84
86
85
87 class nullui(object):
86 class nullui:
88 """blank ui object doing nothing"""
87 """blank ui object doing nothing"""
89
88
90 debugflag = False
89 debugflag = False
@@ -98,7 +97,7 b' class nullui(object):'
98 return nullfunc
97 return nullfunc
99
98
100
99
101 class emptyfilecontext(object):
100 class emptyfilecontext:
102 """minimal filecontext representing an empty file"""
101 """minimal filecontext representing an empty file"""
103
102
104 def __init__(self, repo):
103 def __init__(self, repo):
@@ -278,7 +277,7 b' def overlaycontext(memworkingcopy, ctx, '
278 )
277 )
279
278
280
279
281 class filefixupstate(object):
280 class filefixupstate:
282 """state needed to apply fixups to a single file
281 """state needed to apply fixups to a single file
283
282
284 internally, it keeps file contents of several revisions and a linelog.
283 internally, it keeps file contents of several revisions and a linelog.
@@ -425,7 +424,7 b' class filefixupstate(object):'
425 newfixups.append((fixuprev, a1, a2, b1, b2))
424 newfixups.append((fixuprev, a1, a2, b1, b2))
426 elif a2 - a1 == b2 - b1 or b1 == b2:
425 elif a2 - a1 == b2 - b1 or b1 == b2:
427 # 1:1 line mapping, or chunk was deleted
426 # 1:1 line mapping, or chunk was deleted
428 for i in pycompat.xrange(a1, a2):
427 for i in range(a1, a2):
429 rev, linenum = annotated[i]
428 rev, linenum = annotated[i]
430 if rev > 1:
429 if rev > 1:
431 if b1 == b2: # deletion, simply remove that single line
430 if b1 == b2: # deletion, simply remove that single line
@@ -452,7 +451,7 b' class filefixupstate(object):'
452 """
451 """
453 llog = linelog.linelog()
452 llog = linelog.linelog()
454 a, alines = b'', []
453 a, alines = b'', []
455 for i in pycompat.xrange(len(self.contents)):
454 for i in range(len(self.contents)):
456 b, blines = self.contents[i], self.contentlines[i]
455 b, blines = self.contents[i], self.contentlines[i]
457 llrev = i * 2 + 1
456 llrev = i * 2 + 1
458 chunks = self._alldiffchunks(a, b, alines, blines)
457 chunks = self._alldiffchunks(a, b, alines, blines)
@@ -464,7 +463,7 b' class filefixupstate(object):'
464 def _checkoutlinelog(self):
463 def _checkoutlinelog(self):
465 """() -> [str]. check out file contents from linelog"""
464 """() -> [str]. check out file contents from linelog"""
466 contents = []
465 contents = []
467 for i in pycompat.xrange(len(self.contents)):
466 for i in range(len(self.contents)):
468 rev = (i + 1) * 2
467 rev = (i + 1) * 2
469 self.linelog.annotate(rev)
468 self.linelog.annotate(rev)
470 content = b''.join(map(self._getline, self.linelog.annotateresult))
469 content = b''.join(map(self._getline, self.linelog.annotateresult))
@@ -606,9 +605,9 b' class filefixupstate(object):'
606 a1, a2, b1, b2 = chunk
605 a1, a2, b1, b2 = chunk
607 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
606 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
608 for idx, fa1, fa2, fb1, fb2 in fixups:
607 for idx, fa1, fa2, fb1, fb2 in fixups:
609 for i in pycompat.xrange(fa1, fa2):
608 for i in range(fa1, fa2):
610 aidxs[i - a1] = (max(idx, 1) - 1) // 2
609 aidxs[i - a1] = (max(idx, 1) - 1) // 2
611 for i in pycompat.xrange(fb1, fb2):
610 for i in range(fb1, fb2):
612 bidxs[i - b1] = (max(idx, 1) - 1) // 2
611 bidxs[i - b1] = (max(idx, 1) - 1) // 2
613
612
614 fm.startitem()
613 fm.startitem()
@@ -638,7 +637,7 b' class filefixupstate(object):'
638 )
637 )
639 fm.data(path=self.path, linetype=linetype)
638 fm.data(path=self.path, linetype=linetype)
640
639
641 for i in pycompat.xrange(a1, a2):
640 for i in range(a1, a2):
642 writeline(
641 writeline(
643 aidxs[i - a1],
642 aidxs[i - a1],
644 b'-',
643 b'-',
@@ -646,7 +645,7 b' class filefixupstate(object):'
646 b'deleted',
645 b'deleted',
647 b'diff.deleted',
646 b'diff.deleted',
648 )
647 )
649 for i in pycompat.xrange(b1, b2):
648 for i in range(b1, b2):
650 writeline(
649 writeline(
651 bidxs[i - b1],
650 bidxs[i - b1],
652 b'+',
651 b'+',
@@ -656,7 +655,7 b' class filefixupstate(object):'
656 )
655 )
657
656
658
657
659 class fixupstate(object):
658 class fixupstate:
660 """state needed to run absorb
659 """state needed to run absorb
661
660
662 internally, it keeps paths and filefixupstates.
661 internally, it keeps paths and filefixupstates.
@@ -734,7 +733,7 b' class fixupstate(object):'
734
733
735 def apply(self):
734 def apply(self):
736 """apply fixups to individual filefixupstates"""
735 """apply fixups to individual filefixupstates"""
737 for path, state in pycompat.iteritems(self.fixupmap):
736 for path, state in self.fixupmap.items():
738 if self.ui.debugflag:
737 if self.ui.debugflag:
739 self.ui.write(_(b'applying fixups to %s\n') % path)
738 self.ui.write(_(b'applying fixups to %s\n') % path)
740 state.apply()
739 state.apply()
@@ -742,10 +741,7 b' class fixupstate(object):'
742 @property
741 @property
743 def chunkstats(self):
742 def chunkstats(self):
744 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
743 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
745 return {
744 return {path: state.chunkstats for path, state in self.fixupmap.items()}
746 path: state.chunkstats
747 for path, state in pycompat.iteritems(self.fixupmap)
748 }
749
745
750 def commit(self):
746 def commit(self):
751 """commit changes. update self.finalnode, self.replacemap"""
747 """commit changes. update self.finalnode, self.replacemap"""
@@ -763,7 +759,7 b' class fixupstate(object):'
763 chunkstats = self.chunkstats
759 chunkstats = self.chunkstats
764 if ui.verbose:
760 if ui.verbose:
765 # chunkstats for each file
761 # chunkstats for each file
766 for path, stat in pycompat.iteritems(chunkstats):
762 for path, stat in chunkstats.items():
767 if stat[0]:
763 if stat[0]:
768 ui.write(
764 ui.write(
769 _(b'%s: %d of %d chunk(s) applied\n')
765 _(b'%s: %d of %d chunk(s) applied\n')
@@ -846,7 +842,7 b' class fixupstate(object):'
846 repo = self.repo
842 repo = self.repo
847 needupdate = [
843 needupdate = [
848 (name, self.replacemap[hsh])
844 (name, self.replacemap[hsh])
849 for name, hsh in pycompat.iteritems(repo._bookmarks)
845 for name, hsh in repo._bookmarks.items()
850 if hsh in self.replacemap
846 if hsh in self.replacemap
851 ]
847 ]
852 changes = []
848 changes = []
@@ -909,7 +905,7 b' class fixupstate(object):'
909 # ctx changes more files (not a subset of memworkingcopy)
905 # ctx changes more files (not a subset of memworkingcopy)
910 if not set(ctx.files()).issubset(set(memworkingcopy)):
906 if not set(ctx.files()).issubset(set(memworkingcopy)):
911 return False
907 return False
912 for path, content in pycompat.iteritems(memworkingcopy):
908 for path, content in memworkingcopy.items():
913 if path not in pctx or path not in ctx:
909 if path not in pctx or path not in ctx:
914 return False
910 return False
915 fctx = ctx[path]
911 fctx = ctx[path]
@@ -952,7 +948,7 b' class fixupstate(object):'
952 def _cleanupoldcommits(self):
948 def _cleanupoldcommits(self):
953 replacements = {
949 replacements = {
954 k: ([v] if v is not None else [])
950 k: ([v] if v is not None else [])
955 for k, v in pycompat.iteritems(self.replacemap)
951 for k, v in self.replacemap.items()
956 }
952 }
957 if replacements:
953 if replacements:
958 scmutil.cleanupnodes(
954 scmutil.cleanupnodes(
@@ -1002,7 +998,7 b' def overlaydiffcontext(ctx, chunks):'
1002 if not path or not info:
998 if not path or not info:
1003 continue
999 continue
1004 patchmap[path].append(info)
1000 patchmap[path].append(info)
1005 for path, patches in pycompat.iteritems(patchmap):
1001 for path, patches in patchmap.items():
1006 if path not in ctx or not patches:
1002 if path not in ctx or not patches:
1007 continue
1003 continue
1008 patches.sort(reverse=True)
1004 patches.sort(reverse=True)
@@ -1049,6 +1045,10 b' def absorb(ui, repo, stack=None, targetc'
1049 origchunks = patch.parsepatch(diff)
1045 origchunks = patch.parsepatch(diff)
1050 chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
1046 chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0]
1051 targetctx = overlaydiffcontext(stack[-1], chunks)
1047 targetctx = overlaydiffcontext(stack[-1], chunks)
1048 if opts.get(b'edit_lines'):
1049 # If we're going to open the editor, don't ask the user to confirm
1050 # first
1051 opts[b'apply_changes'] = True
1052 fm = None
1052 fm = None
1053 if opts.get(b'print_changes') or not opts.get(b'apply_changes'):
1053 if opts.get(b'print_changes') or not opts.get(b'apply_changes'):
1054 fm = ui.formatter(b'absorb', opts)
1054 fm = ui.formatter(b'absorb', opts)
@@ -1066,7 +1066,7 b' def absorb(ui, repo, stack=None, targetc'
1066 fm.context(ctx=ctx)
1066 fm.context(ctx=ctx)
1067 fm.data(linetype=b'changeset')
1067 fm.data(linetype=b'changeset')
1068 fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node')
1068 fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node')
1069 descfirstline = ctx.description().splitlines()[0]
1069 descfirstline = stringutil.firstline(ctx.description())
1070 fm.write(
1070 fm.write(
1071 b'descfirstline',
1071 b'descfirstline',
1072 b'%s\n',
1072 b'%s\n',
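The absorb hunks above all follow one mechanical pattern: the pycompat.iteritems(d) shim, kept so the same source ran on Python 2 and Python 3, is replaced by the plain d.items() view now that only Python 3 is supported. A minimal sketch of the before/after shape, using made-up data rather than anything from this diff:

    fixupmap = {b'a.txt': 'state-a', b'b.txt': 'state-b'}  # illustrative only

    # before (2/3-compatible): for path, state in pycompat.iteritems(fixupmap): ...
    # after (Python 3 only): the plain dict view is equivalent and lazy
    for path, state in fixupmap.items():
        print(path, state)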
@@ -213,14 +213,12 b' 3) Deny access to a file to anyone but u'
213
213
214 '''
214 '''
215
215
216 from __future__ import absolute_import
217
216
218 from mercurial.i18n import _
217 from mercurial.i18n import _
219 from mercurial import (
218 from mercurial import (
220 error,
219 error,
221 extensions,
220 extensions,
222 match,
221 match,
223 pycompat,
224 registrar,
222 registrar,
225 util,
223 util,
226 )
224 )
@@ -453,7 +451,7 b' def _txnhook(ui, repo, hooktype, node, s'
453 allow = buildmatch(ui, repo, user, b'acl.allow')
451 allow = buildmatch(ui, repo, user, b'acl.allow')
454 deny = buildmatch(ui, repo, user, b'acl.deny')
452 deny = buildmatch(ui, repo, user, b'acl.deny')
455
453
456 for rev in pycompat.xrange(repo[node].rev(), len(repo)):
454 for rev in range(repo[node].rev(), len(repo)):
457 ctx = repo[rev]
455 ctx = repo[rev]
458 branch = ctx.branch()
456 branch = ctx.branch()
459 if denybranches and denybranches(branch):
457 if denybranches and denybranches(branch):
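The acl hook gets the companion cleanup: pycompat.xrange was the portability alias for a lazy integer range, and on Python 3 the built-in range is already lazy, so the alias goes away. Sketch with stand-in bounds (the real code derives them from repo[node].rev() and len(repo)):

    start_rev, repo_len = 8, 12   # stand-ins, not values from the diff

    # before: for rev in pycompat.xrange(start_rev, repo_len): ...
    for rev in range(start_rev, repo_len):
        pass  # the real hook inspects repo[rev] here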
@@ -10,7 +10,6 b' This extension provides an ``amend`` com'
10 ``commit --amend`` but does not prompt an editor.
10 ``commit --amend`` but does not prompt an editor.
11 """
11 """
12
12
13 from __future__ import absolute_import
14
13
15 from mercurial.i18n import _
14 from mercurial.i18n import _
16 from mercurial import (
15 from mercurial import (
@@ -24,7 +24,6 b' The threshold at which a file is conside'
24 #
24 #
25 # See http://markmail.org/thread/5pxnljesvufvom57 for context.
25 # See http://markmail.org/thread/5pxnljesvufvom57 for context.
26
26
27 from __future__ import absolute_import
28
27
29 from mercurial.i18n import _
28 from mercurial.i18n import _
30 from mercurial import (
29 from mercurial import (
@@ -11,14 +11,12 b''
11 A terminal with UTF-8 support and monospace narrow text are required.
11 A terminal with UTF-8 support and monospace narrow text are required.
12 '''
12 '''
13
13
14 from __future__ import absolute_import
15
14
16 from mercurial.i18n import _
15 from mercurial.i18n import _
17 from mercurial import (
16 from mercurial import (
18 encoding,
17 encoding,
19 extensions,
18 extensions,
20 graphmod,
19 graphmod,
21 pycompat,
22 templatekw,
20 templatekw,
23 )
21 )
24
22
@@ -54,7 +52,7 b' def prettyedge(before, edge, after):'
54 def convertedges(line):
52 def convertedges(line):
55 line = b' %s ' % line
53 line = b' %s ' % line
56 pretty = []
54 pretty = []
57 for idx in pycompat.xrange(len(line) - 2):
55 for idx in range(len(line) - 2):
58 pretty.append(
56 pretty.append(
59 prettyedge(
57 prettyedge(
60 line[idx : idx + 1],
58 line[idx : idx + 1],
@@ -42,7 +42,6 b' Examples::'
42
42
43 """
43 """
44
44
45 from __future__ import absolute_import
46
45
47 import re
46 import re
48
47
@@ -106,7 +105,7 b" configitem(b'blackbox', b'date-format', "
106 _lastlogger = loggingutil.proxylogger()
105 _lastlogger = loggingutil.proxylogger()
107
106
108
107
109 class blackboxlogger(object):
108 class blackboxlogger:
110 def __init__(self, ui, repo):
109 def __init__(self, ui, repo):
111 self._repo = repo
110 self._repo = repo
112 self._trackedevents = set(ui.configlist(b'blackbox', b'track'))
111 self._trackedevents = set(ui.configlist(b'blackbox', b'track'))
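The class-statement change seen here, and repeated through the rest of the series, drops the explicit object base: on Python 2 it selected new-style classes, while on Python 3 every class is new-style, so the base is redundant. A toy example of the pattern with a hypothetical class name:

    # before: class blackboxlogger(object):
    class examplelogger:              # hypothetical name, same shape as the diff
        def __init__(self, tracked):
            self._trackedevents = set(tracked)

    assert isinstance(examplelogger([]), object)  # object is still the implicit base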
@@ -13,7 +13,6 b''
13 :hg up|co NAME: switch to bookmark
13 :hg up|co NAME: switch to bookmark
14 :hg push -B .: push active bookmark
14 :hg push -B .: push active bookmark
15 """
15 """
16 from __future__ import absolute_import
17
16
18 from mercurial.i18n import _
17 from mercurial.i18n import _
19 from mercurial import (
18 from mercurial import (
@@ -291,7 +291,6 b' All the above add a comment to the Bugzi'
291 Changeset commit comment. Bug 1234.
291 Changeset commit comment. Bug 1234.
292 '''
292 '''
293
293
294 from __future__ import absolute_import
295
294
296 import json
295 import json
297 import re
296 import re
@@ -435,7 +434,7 b' configitem('
435 )
434 )
436
435
437
436
438 class bzaccess(object):
437 class bzaccess:
439 '''Base class for access to Bugzilla.'''
438 '''Base class for access to Bugzilla.'''
440
439
441 def __init__(self, ui):
440 def __init__(self, ui):
@@ -691,7 +690,7 b' class bzmysql_3_0(bzmysql_2_18):'
691 # Bugzilla via XMLRPC interface.
690 # Bugzilla via XMLRPC interface.
692
691
693
692
694 class cookietransportrequest(object):
693 class cookietransportrequest:
695 """A Transport request method that retains cookies over its lifetime.
694 """A Transport request method that retains cookies over its lifetime.
696
695
697 The regular xmlrpclib transports ignore cookies. Which causes
696 The regular xmlrpclib transports ignore cookies. Which causes
@@ -1096,7 +1095,7 b' class bzrestapi(bzaccess):'
1096 pass
1095 pass
1097
1096
1098
1097
1099 class bugzilla(object):
1098 class bugzilla:
1100 # supported versions of bugzilla. different versions have
1099 # supported versions of bugzilla. different versions have
1101 # different schemas.
1100 # different schemas.
1102 _versions = {
1101 _versions = {
@@ -28,7 +28,6 b' A few informative commands such as ``hg '
28 ignore censored data and merely report that it was encountered.
28 ignore censored data and merely report that it was encountered.
29 """
29 """
30
30
31 from __future__ import absolute_import
32
31
33 from mercurial.i18n import _
32 from mercurial.i18n import _
34 from mercurial.node import short
33 from mercurial.node import short
@@ -14,7 +14,6 b' This extension is deprecated. You should'
14 "children(REV)"` instead.
14 "children(REV)"` instead.
15 '''
15 '''
16
16
17 from __future__ import absolute_import
18
17
19 from mercurial.i18n import _
18 from mercurial.i18n import _
20 from mercurial import (
19 from mercurial import (
@@ -8,7 +8,6 b''
8
8
9 '''command to display statistics about repository history'''
9 '''command to display statistics about repository history'''
10
10
11 from __future__ import absolute_import, division
12
11
13 import datetime
12 import datetime
14 import os
13 import os
@@ -202,7 +202,6 b' instructions when a failure occurs, thus'
202 Mercurial server when the bundle hosting service fails.
202 Mercurial server when the bundle hosting service fails.
203 """
203 """
204
204
205 from __future__ import absolute_import
206
205
207 from mercurial import (
206 from mercurial import (
208 bundlecaches,
207 bundlecaches,
@@ -5,7 +5,6 b''
5
5
6 '''close arbitrary heads without checking them out first'''
6 '''close arbitrary heads without checking them out first'''
7
7
8 from __future__ import absolute_import
9
8
10 from mercurial.i18n import _
9 from mercurial.i18n import _
11 from mercurial import (
10 from mercurial import (
@@ -7,7 +7,6 b''
7
7
8 '''adds a new flag extras to commit (ADVANCED)'''
8 '''adds a new flag extras to commit (ADVANCED)'''
9
9
10 from __future__ import absolute_import
11
10
12 import re
11 import re
13
12
@@ -7,7 +7,6 b''
7
7
8 '''import revisions from foreign VCS repositories into Mercurial'''
8 '''import revisions from foreign VCS repositories into Mercurial'''
9
9
10 from __future__ import absolute_import
11
10
12 from mercurial.i18n import _
11 from mercurial.i18n import _
13 from mercurial import registrar
12 from mercurial import registrar
@@ -8,7 +8,6 b''
8 # This module is for handling Breezy imports or `brz`, but it's also compatible
8 # This module is for handling Breezy imports or `brz`, but it's also compatible
9 # with Bazaar or `bzr`, that was formerly known as Bazaar-NG;
9 # with Bazaar or `bzr`, that was formerly known as Bazaar-NG;
10 # it cannot access `bar` repositories, but they were never used very much.
10 # it cannot access `bar` repositories, but they were never used very much.
11 from __future__ import absolute_import
12
11
13 import os
12 import os
14
13
@@ -16,7 +15,6 b' from mercurial.i18n import _'
16 from mercurial import (
15 from mercurial import (
17 demandimport,
16 demandimport,
18 error,
17 error,
19 pycompat,
20 util,
18 util,
21 )
19 )
22 from . import common
20 from . import common
@@ -210,7 +208,7 b' class bzr_source(common.converter_source'
210 if not branch.supports_tags():
208 if not branch.supports_tags():
211 return {}
209 return {}
212 tagdict = branch.tags.get_tag_dict()
210 tagdict = branch.tags.get_tag_dict()
213 for name, rev in pycompat.iteritems(tagdict):
211 for name, rev in tagdict.items():
214 bytetags[self.recode(name)] = rev
212 bytetags[self.recode(name)] = rev
215 return bytetags
213 return bytetags
216
214
@@ -4,12 +4,11 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import base64
8 import base64
10 import datetime
9 import datetime
11 import errno
12 import os
10 import os
11 import pickle
13 import re
12 import re
14 import shlex
13 import shlex
15 import subprocess
14 import subprocess
@@ -25,7 +24,6 b' from mercurial import ('
25 )
24 )
26 from mercurial.utils import procutil
25 from mercurial.utils import procutil
27
26
28 pickle = util.pickle
29 propertycache = util.propertycache
27 propertycache = util.propertycache
30
28
31
29
@@ -35,7 +33,7 b' def _encodeornone(d):'
35 return d.encode('latin1')
33 return d.encode('latin1')
36
34
37
35
38 class _shlexpy3proxy(object):
36 class _shlexpy3proxy:
39 def __init__(self, l):
37 def __init__(self, l):
40 self._l = l
38 self._l = l
41
39
@@ -56,45 +54,25 b' class _shlexpy3proxy(object):'
56
54
57 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
55 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
58 if data is None:
56 if data is None:
59 if pycompat.ispy3:
57 data = open(filepath, b'r', encoding='latin1')
60 data = open(filepath, b'r', encoding='latin1')
61 else:
62 data = open(filepath, b'r')
63 else:
58 else:
64 if filepath is not None:
59 if filepath is not None:
65 raise error.ProgrammingError(
60 raise error.ProgrammingError(
66 b'shlexer only accepts data or filepath, not both'
61 b'shlexer only accepts data or filepath, not both'
67 )
62 )
68 if pycompat.ispy3:
63 data = data.decode('latin1')
69 data = data.decode('latin1')
70 l = shlex.shlex(data, infile=filepath, posix=True)
64 l = shlex.shlex(data, infile=filepath, posix=True)
71 if whitespace is not None:
65 if whitespace is not None:
72 l.whitespace_split = True
66 l.whitespace_split = True
73 if pycompat.ispy3:
67 l.whitespace += whitespace.decode('latin1')
74 l.whitespace += whitespace.decode('latin1')
75 else:
76 l.whitespace += whitespace
77 if wordchars is not None:
68 if wordchars is not None:
78 if pycompat.ispy3:
69 l.wordchars += wordchars.decode('latin1')
79 l.wordchars += wordchars.decode('latin1')
70 return _shlexpy3proxy(l)
80 else:
81 l.wordchars += wordchars
82 if pycompat.ispy3:
83 return _shlexpy3proxy(l)
84 return l
85
86
87 if pycompat.ispy3:
88 base64_encodebytes = base64.encodebytes
89 base64_decodebytes = base64.decodebytes
90 else:
91 base64_encodebytes = base64.encodestring
92 base64_decodebytes = base64.decodestring
93
71
94
72
95 def encodeargs(args):
73 def encodeargs(args):
96 def encodearg(s):
74 def encodearg(s):
97 lines = base64_encodebytes(s)
75 lines = base64.encodebytes(s)
98 lines = [l.splitlines()[0] for l in pycompat.iterbytestr(lines)]
76 lines = [l.splitlines()[0] for l in pycompat.iterbytestr(lines)]
99 return b''.join(lines)
77 return b''.join(lines)
100
78
@@ -103,7 +81,7 b' def encodeargs(args):'
103
81
104
82
105 def decodeargs(s):
83 def decodeargs(s):
106 s = base64_decodebytes(s)
84 s = base64.decodebytes(s)
107 return pickle.loads(s)
85 return pickle.loads(s)
108
86
109
87
@@ -128,7 +106,7 b' class NoRepo(Exception):'
128 SKIPREV = b'SKIP'
106 SKIPREV = b'SKIP'
129
107
130
108
131 class commit(object):
109 class commit:
132 def __init__(
110 def __init__(
133 self,
111 self,
134 author,
112 author,
@@ -158,7 +136,7 b' class commit(object):'
158 self.ctx = ctx # for hg to hg conversions
136 self.ctx = ctx # for hg to hg conversions
159
137
160
138
161 class converter_source(object):
139 class converter_source:
162 """Conversion source interface"""
140 """Conversion source interface"""
163
141
164 def __init__(self, ui, repotype, path=None, revs=None):
142 def __init__(self, ui, repotype, path=None, revs=None):
@@ -247,7 +225,7 b' class converter_source(object):'
247 if not encoding:
225 if not encoding:
248 encoding = self.encoding or b'utf-8'
226 encoding = self.encoding or b'utf-8'
249
227
250 if isinstance(s, pycompat.unicode):
228 if isinstance(s, str):
251 return s.encode("utf-8")
229 return s.encode("utf-8")
252 try:
230 try:
253 return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
231 return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
@@ -308,7 +286,7 b' class converter_source(object):'
308 return True
286 return True
309
287
310
288
311 class converter_sink(object):
289 class converter_sink:
312 """Conversion sink (target) interface"""
290 """Conversion sink (target) interface"""
313
291
314 def __init__(self, ui, repotype, path):
292 def __init__(self, ui, repotype, path):
@@ -404,7 +382,7 b' class converter_sink(object):'
404 raise NotImplementedError
382 raise NotImplementedError
405
383
406
384
407 class commandline(object):
385 class commandline:
408 def __init__(self, ui, command):
386 def __init__(self, ui, command):
409 self.ui = ui
387 self.ui = ui
410 self.command = command
388 self.command = command
@@ -418,7 +396,7 b' class commandline(object):'
418 def _cmdline(self, cmd, *args, **kwargs):
396 def _cmdline(self, cmd, *args, **kwargs):
419 kwargs = pycompat.byteskwargs(kwargs)
397 kwargs = pycompat.byteskwargs(kwargs)
420 cmdline = [self.command, cmd] + list(args)
398 cmdline = [self.command, cmd] + list(args)
421 for k, v in pycompat.iteritems(kwargs):
399 for k, v in kwargs.items():
422 if len(k) == 1:
400 if len(k) == 1:
423 cmdline.append(b'-' + k)
401 cmdline.append(b'-' + k)
424 else:
402 else:
@@ -549,11 +527,9 b' class mapfile(dict):'
549 return
527 return
550 try:
528 try:
551 fp = open(self.path, b'rb')
529 fp = open(self.path, b'rb')
552 except IOError as err:
530 except FileNotFoundError:
553 if err.errno != errno.ENOENT:
554 raise
555 return
531 return
556 for i, line in enumerate(util.iterfile(fp)):
532 for i, line in enumerate(fp):
557 line = line.splitlines()[0].rstrip()
533 line = line.splitlines()[0].rstrip()
558 if not line:
534 if not line:
559 # Ignore blank lines
535 # Ignore blank lines
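convert/common.py bundles several of the Python 3-only simplifications in one place: the errno.ENOENT dance around IOError collapses into catching FileNotFoundError, the conditional base64 aliases reduce to encodebytes/decodebytes, and util.iterfile(fp) becomes plain iteration over the file object. A self-contained sketch of the first two, with an invented file name:

    import base64

    def read_lines(path):
        # before: except IOError as err: if err.errno != errno.ENOENT: raise
        try:
            fp = open(path, 'rb')
        except FileNotFoundError:        # Python 3 subclass of OSError for ENOENT
            return []
        with fp:
            return [line.rstrip() for line in fp]   # file objects iterate lazily by line

    print(read_lines('/no/such/mapfile'))           # -> []

    # encodestring/decodestring were the Python 2 spellings; only the *bytes
    # variants exist on Python 3, so the "if pycompat.ispy3" aliases disappear.
    blob = base64.encodebytes(b'payload')
    assert base64.decodebytes(blob) == b'payload'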
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import collections
8 import collections
10 import os
9 import os
@@ -87,7 +86,7 b' def readauthormap(ui, authorfile, author'
87
86
88
87
89 def recode(s):
88 def recode(s):
90 if isinstance(s, pycompat.unicode):
89 if isinstance(s, str):
91 return s.encode(pycompat.sysstr(orig_encoding), 'replace')
90 return s.encode(pycompat.sysstr(orig_encoding), 'replace')
92 else:
91 else:
93 return s.decode('utf-8').encode(
92 return s.decode('utf-8').encode(
@@ -177,7 +176,7 b' def convertsink(ui, path, type):'
177 raise error.Abort(_(b'%s: unknown repository type') % path)
176 raise error.Abort(_(b'%s: unknown repository type') % path)
178
177
179
178
180 class progresssource(object):
179 class progresssource:
181 def __init__(self, ui, source, filecount):
180 def __init__(self, ui, source, filecount):
182 self.ui = ui
181 self.ui = ui
183 self.source = source
182 self.source = source
@@ -199,7 +198,7 b' class progresssource(object):'
199 self.progress.complete()
198 self.progress.complete()
200
199
201
200
202 class converter(object):
201 class converter:
203 def __init__(self, ui, source, dest, revmapfile, opts):
202 def __init__(self, ui, source, dest, revmapfile, opts):
204
203
205 self.source = source
204 self.source = source
@@ -243,7 +242,7 b' class converter(object):'
243 m = {}
242 m = {}
244 try:
243 try:
245 fp = open(path, b'rb')
244 fp = open(path, b'rb')
246 for i, line in enumerate(util.iterfile(fp)):
245 for i, line in enumerate(fp):
247 line = line.splitlines()[0].rstrip()
246 line = line.splitlines()[0].rstrip()
248 if not line:
247 if not line:
249 # Ignore blank lines
248 # Ignore blank lines
@@ -585,9 +584,7 b' class converter(object):'
585 # write another hash correspondence to override the
584 # write another hash correspondence to override the
586 # previous one so we don't end up with extra tag heads
585 # previous one so we don't end up with extra tag heads
587 tagsparents = [
586 tagsparents = [
588 e
587 e for e in self.map.items() if e[1] == tagsparent
589 for e in pycompat.iteritems(self.map)
590 if e[1] == tagsparent
591 ]
588 ]
592 if tagsparents:
589 if tagsparents:
593 self.map[tagsparents[0][0]] = nrev
590 self.map[tagsparents[0][0]] = nrev
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import errno
8 import errno
10 import os
9 import os
@@ -19,7 +18,6 b' from mercurial.pycompat import ('
19 from mercurial import (
18 from mercurial import (
20 encoding,
19 encoding,
21 error,
20 error,
22 pycompat,
23 util,
21 util,
24 )
22 )
25 from mercurial.utils import (
23 from mercurial.utils import (
@@ -317,7 +315,7 b' class convert_cvs(converter_source):'
317 if full:
315 if full:
318 raise error.Abort(_(b"convert from cvs does not support --full"))
316 raise error.Abort(_(b"convert from cvs does not support --full"))
319 self._parse()
317 self._parse()
320 return sorted(pycompat.iteritems(self.files[rev])), {}, set()
318 return sorted(self.files[rev].items()), {}, set()
321
319
322 def getcommit(self, rev):
320 def getcommit(self, rev):
323 self._parse()
321 self._parse()
@@ -4,10 +4,10 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import functools
8 import functools
10 import os
9 import os
10 import pickle
11 import re
11 import re
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
@@ -25,10 +25,8 b' from mercurial.utils import ('
25 stringutil,
25 stringutil,
26 )
26 )
27
27
28 pickle = util.pickle
29
28
30
31 class logentry(object):
29 class logentry:
32 """Class logentry has the following attributes:
30 """Class logentry has the following attributes:
33 .author - author name as CVS knows it
31 .author - author name as CVS knows it
34 .branch - name of branch this revision is on
32 .branch - name of branch this revision is on
@@ -468,7 +466,7 b' def createlog(ui, directory=None, root=b'
468
466
469 # find the branches starting from this revision
467 # find the branches starting from this revision
470 branchpoints = set()
468 branchpoints = set()
471 for branch, revision in pycompat.iteritems(branchmap):
469 for branch, revision in branchmap.items():
472 revparts = tuple([int(i) for i in revision.split(b'.')])
470 revparts = tuple([int(i) for i in revision.split(b'.')])
473 if len(revparts) < 2: # bad tags
471 if len(revparts) < 2: # bad tags
474 continue
472 continue
@@ -579,7 +577,7 b' def createlog(ui, directory=None, root=b'
579 return log
577 return log
580
578
581
579
582 class changeset(object):
580 class changeset:
583 """Class changeset has the following attributes:
581 """Class changeset has the following attributes:
584 .id - integer identifying this changeset (list index)
582 .id - integer identifying this changeset (list index)
585 .author - author name as CVS knows it
583 .author - author name as CVS knows it
@@ -834,7 +832,7 b' def createchangeset(ui, log, fuzz=60, me'
834 # branchpoints such that it is the latest possible
832 # branchpoints such that it is the latest possible
835 # commit without any intervening, unrelated commits.
833 # commit without any intervening, unrelated commits.
836
834
837 for candidate in pycompat.xrange(i):
835 for candidate in range(i):
838 if c.branch not in changesets[candidate].branchpoints:
836 if c.branch not in changesets[candidate].branchpoints:
839 if p is not None:
837 if p is not None:
840 break
838 break
@@ -4,9 +4,7 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import errno
10 import os
8 import os
11 import re
9 import re
12 import shutil
10 import shutil
@@ -114,7 +112,7 b' class darcs_source(common.converter_sour'
114 shutil.rmtree(self.tmppath, ignore_errors=True)
112 shutil.rmtree(self.tmppath, ignore_errors=True)
115
113
116 def recode(self, s, encoding=None):
114 def recode(self, s, encoding=None):
117 if isinstance(s, pycompat.unicode):
115 if isinstance(s, str):
118 # XMLParser returns unicode objects for anything it can't
116 # XMLParser returns unicode objects for anything it can't
119 # encode into ASCII. We convert them back to str to get
117 # encode into ASCII. We convert them back to str to get
120 # recode's normal conversion behavior.
118 # recode's normal conversion behavior.
@@ -231,10 +229,8 b' class darcs_source(common.converter_sour'
231 try:
229 try:
232 data = util.readfile(path)
230 data = util.readfile(path)
233 mode = os.lstat(path).st_mode
231 mode = os.lstat(path).st_mode
234 except IOError as inst:
232 except FileNotFoundError:
235 if inst.errno == errno.ENOENT:
233 return None, None
236 return None, None
237 raise
238 mode = (mode & 0o111) and b'x' or b''
234 mode = (mode & 0o111) and b'x' or b''
239 return data, mode
235 return data, mode
240
236
@@ -4,7 +4,6 b''
4 # This software may be used and distributed according to the terms of the
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6
6
7 from __future__ import absolute_import, print_function
8
7
9 import posixpath
8 import posixpath
10
9
@@ -42,7 +41,7 b' def normalize(path):'
42 return posixpath.normpath(path)
41 return posixpath.normpath(path)
43
42
44
43
45 class filemapper(object):
44 class filemapper:
46 """Map and filter filenames when importing.
45 """Map and filter filenames when importing.
47 A name can be mapped to itself, a new name, or None (omit from new
46 A name can be mapped to itself, a new name, or None (omit from new
48 repository)."""
47 repository)."""
@@ -126,7 +125,7 b' class filemapper(object):'
126 repo belong to the source repo and what parts don't."""
125 repo belong to the source repo and what parts don't."""
127 if self.targetprefixes is None:
126 if self.targetprefixes is None:
128 self.targetprefixes = set()
127 self.targetprefixes = set()
129 for before, after in pycompat.iteritems(self.rename):
128 for before, after in self.rename.items():
130 self.targetprefixes.add(after)
129 self.targetprefixes.add(after)
131
130
132 # If "." is a target, then all target files are considered from the
131 # If "." is a target, then all target files are considered from the
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import os
8 import os
10
9
@@ -20,7 +19,7 b' from mercurial import ('
20 from . import common
19 from . import common
21
20
22
21
23 class submodule(object):
22 class submodule:
24 def __init__(self, path, node, url):
23 def __init__(self, path, node, url):
25 self.path = path
24 self.path = path
26 self.node = node
25 self.node = node
@@ -5,7 +5,6 b''
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 from __future__ import absolute_import
9
8
10 import os
9 import os
11 import shutil
10 import shutil
@@ -28,7 +27,7 b' from . import common'
28
27
29
28
30 class gnuarch_source(common.converter_source, common.commandline):
29 class gnuarch_source(common.converter_source, common.commandline):
31 class gnuarch_rev(object):
30 class gnuarch_rev:
32 def __init__(self, rev):
31 def __init__(self, rev):
33 self.rev = rev
32 self.rev = rev
34 self.summary = b''
33 self.summary = b''
@@ -16,7 +16,6 b''
16 # identifier to be stored in the converted revision. This will cause
16 # identifier to be stored in the converted revision. This will cause
17 # the converted revision to have a different identity than the
17 # the converted revision to have a different identity than the
18 # source.
18 # source.
19 from __future__ import absolute_import
20
19
21 import os
20 import os
22 import re
21 import re
@@ -40,7 +39,6 b' from mercurial import ('
40 merge as mergemod,
39 merge as mergemod,
41 mergestate,
40 mergestate,
42 phases,
41 phases,
43 pycompat,
44 util,
42 util,
45 )
43 )
46 from mercurial.utils import dateutil
44 from mercurial.utils import dateutil
@@ -139,7 +137,7 b' class mercurial_sink(common.converter_si'
139
137
140 if missings:
138 if missings:
141 self.after()
139 self.after()
142 for pbranch, heads in sorted(pycompat.iteritems(missings)):
140 for pbranch, heads in sorted(missings.items()):
143 pbranchpath = os.path.join(self.path, pbranch)
141 pbranchpath = os.path.join(self.path, pbranch)
144 prepo = hg.peer(self.ui, {}, pbranchpath)
142 prepo = hg.peer(self.ui, {}, pbranchpath)
145 self.ui.note(
143 self.ui.note(
@@ -424,7 +422,7 b' class mercurial_sink(common.converter_si'
424 tagparent = tagparent or self.repo.nullid
422 tagparent = tagparent or self.repo.nullid
425
423
426 oldlines = set()
424 oldlines = set()
427 for branch, heads in pycompat.iteritems(self.repo.branchmap()):
425 for branch, heads in self.repo.branchmap().items():
428 for h in heads:
426 for h in heads:
429 if b'.hgtags' in self.repo[h]:
427 if b'.hgtags' in self.repo[h]:
430 oldlines.update(
428 oldlines.update(
@@ -596,7 +594,7 b' class mercurial_source(common.converter_'
596 maappend = ma.append
594 maappend = ma.append
597 rappend = r.append
595 rappend = r.append
598 d = ctx1.manifest().diff(ctx2.manifest())
596 d = ctx1.manifest().diff(ctx2.manifest())
599 for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d):
597 for f, ((node1, flag1), (node2, flag2)) in d.items():
600 if node2 is None:
598 if node2 is None:
601 rappend(f)
599 rappend(f)
602 else:
600 else:
@@ -622,7 +620,7 b' class mercurial_source(common.converter_'
622 cleanp2 = set()
620 cleanp2 = set()
623 if len(parents) == 2:
621 if len(parents) == 2:
624 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
622 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
625 for f, value in pycompat.iteritems(d):
623 for f, value in d.items():
626 if value is None:
624 if value is None:
627 cleanp2.add(f)
625 cleanp2.add(f)
628 changes = [(f, rev) for f in files if f not in self.ignored]
626 changes = [(f, rev) for f in files if f not in self.ignored]
@@ -5,7 +5,6 b''
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 from __future__ import absolute_import
9
8
10 import os
9 import os
11 import re
10 import re
@@ -103,7 +102,7 b' class monotone_source(common.converter_s'
103 # Prepare the command in automate stdio format
102 # Prepare the command in automate stdio format
104 kwargs = pycompat.byteskwargs(kwargs)
103 kwargs = pycompat.byteskwargs(kwargs)
105 command = []
104 command = []
106 for k, v in pycompat.iteritems(kwargs):
105 for k, v in kwargs.items():
107 command.append(b"%d:%s" % (len(k), k))
106 command.append(b"%d:%s" % (len(k), k))
108 if v:
107 if v:
109 command.append(b"%d:%s" % (len(v), v))
108 command.append(b"%d:%s" % (len(v), v))
@@ -151,7 +150,7 b' class monotone_source(common.converter_s'
151 raise error.Abort(_(b'bad mtn packet - no end of packet size'))
150 raise error.Abort(_(b'bad mtn packet - no end of packet size'))
152 lengthstr += read
151 lengthstr += read
153 try:
152 try:
154 length = pycompat.long(lengthstr[:-1])
153 length = int(lengthstr[:-1])
155 except TypeError:
154 except TypeError:
156 raise error.Abort(
155 raise error.Abort(
157 _(b'bad mtn packet - bad packet size %s') % lengthstr
156 _(b'bad mtn packet - bad packet size %s') % lengthstr
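The monotone and darcs sources lose two more aliases: pycompat.long (Python 2's arbitrary-precision long, which is simply int on Python 3) and pycompat.unicode (Python 2's unicode type, which is str on Python 3). Sketch with illustrative values:

    lengthstr = b'42:'
    # before: length = pycompat.long(lengthstr[:-1])
    length = int(lengthstr[:-1])          # int() accepts ASCII-digit bytes
    assert length == 42

    s = 'café'
    # before: if isinstance(s, pycompat.unicode): ...
    if isinstance(s, str):                # text-type check on Python 3
        s = s.encode('utf-8')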
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import marshal
8 import marshal
10 import re
9 import re
@@ -1,11 +1,11 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 from __future__ import absolute_import
5
4
6 import codecs
5 import codecs
7 import locale
6 import locale
8 import os
7 import os
8 import pickle
9 import re
9 import re
10 import xml.dom.minidom
10 import xml.dom.minidom
11
11
@@ -26,7 +26,6 b' from mercurial.utils import ('
26
26
27 from . import common
27 from . import common
28
28
29 pickle = util.pickle
30 stringio = util.stringio
29 stringio = util.stringio
31 propertycache = util.propertycache
30 propertycache = util.propertycache
32 urlerr = util.urlerr
31 urlerr = util.urlerr
@@ -181,7 +180,7 b' def optrev(number):'
181 return optrev
180 return optrev
182
181
183
182
184 class changedpath(object):
183 class changedpath:
185 def __init__(self, p):
184 def __init__(self, p):
186 self.copyfrom_path = p.copyfrom_path
185 self.copyfrom_path = p.copyfrom_path
187 self.copyfrom_rev = p.copyfrom_rev
186 self.copyfrom_rev = p.copyfrom_rev
@@ -203,7 +202,7 b' def get_log_child('
203 def receiver(orig_paths, revnum, author, date, message, pool):
202 def receiver(orig_paths, revnum, author, date, message, pool):
204 paths = {}
203 paths = {}
205 if orig_paths is not None:
204 if orig_paths is not None:
206 for k, v in pycompat.iteritems(orig_paths):
205 for k, v in orig_paths.items():
207 paths[k] = changedpath(v)
206 paths[k] = changedpath(v)
208 pickle.dump((paths, revnum, author, date, message), fp, protocol)
207 pickle.dump((paths, revnum, author, date, message), fp, protocol)
209
208
@@ -249,7 +248,7 b' def debugsvnlog(ui, **opts):'
249 get_log_child(ui.fout, *args)
248 get_log_child(ui.fout, *args)
250
249
251
250
252 class logstream(object):
251 class logstream:
253 """Interruptible revision log iterator."""
252 """Interruptible revision log iterator."""
254
253
255 def __init__(self, stdout):
254 def __init__(self, stdout):
@@ -298,7 +297,7 b' class directlogstream(list):'
298 def receiver(orig_paths, revnum, author, date, message, pool):
297 def receiver(orig_paths, revnum, author, date, message, pool):
299 paths = {}
298 paths = {}
300 if orig_paths is not None:
299 if orig_paths is not None:
301 for k, v in pycompat.iteritems(orig_paths):
300 for k, v in orig_paths.items():
302 paths[k] = changedpath(v)
301 paths[k] = changedpath(v)
303 self.append((paths, revnum, author, date, message))
302 self.append((paths, revnum, author, date, message))
304
303
@@ -365,32 +364,6 b' protomap = {'
365 }
364 }
366
365
367
366
368 class NonUtf8PercentEncodedBytes(Exception):
369 pass
370
371
372 # Subversion paths are Unicode. Since the percent-decoding is done on
373 # UTF-8-encoded strings, percent-encoded bytes are interpreted as UTF-8.
374 def url2pathname_like_subversion(unicodepath):
375 if pycompat.ispy3:
376 # On Python 3, we have to pass unicode to urlreq.url2pathname().
377 # Percent-decoded bytes get decoded using UTF-8 and the 'replace' error
378 # handler.
379 unicodepath = urlreq.url2pathname(unicodepath)
380 if u'\N{REPLACEMENT CHARACTER}' in unicodepath:
381 raise NonUtf8PercentEncodedBytes
382 else:
383 return unicodepath
384 else:
385 # If we passed unicode on Python 2, it would be converted using the
386 # latin-1 encoding. Therefore, we pass UTF-8-encoded bytes.
387 unicodepath = urlreq.url2pathname(unicodepath.encode('utf-8'))
388 try:
389 return unicodepath.decode('utf-8')
390 except UnicodeDecodeError:
391 raise NonUtf8PercentEncodedBytes
392
393
394 def issvnurl(ui, url):
367 def issvnurl(ui, url):
395 try:
368 try:
396 proto, path = url.split(b'://', 1)
369 proto, path = url.split(b'://', 1)
@@ -413,9 +386,15 b' def issvnurl(ui, url):'
413 % pycompat.sysbytes(fsencoding)
386 % pycompat.sysbytes(fsencoding)
414 )
387 )
415 return False
388 return False
416 try:
389
417 unicodepath = url2pathname_like_subversion(unicodepath)
390 # Subversion paths are Unicode. Since it does percent-decoding on
418 except NonUtf8PercentEncodedBytes:
391 # UTF-8-encoded strings, percent-encoded bytes are interpreted as
392 # UTF-8.
393 # On Python 3, we have to pass unicode to urlreq.url2pathname().
394 # Percent-decoded bytes get decoded using UTF-8 and the 'replace'
395 # error handler.
396 unicodepath = urlreq.url2pathname(unicodepath)
397 if u'\N{REPLACEMENT CHARACTER}' in unicodepath:
419 ui.warn(
398 ui.warn(
420 _(
399 _(
421 b'Subversion does not support non-UTF-8 '
400 b'Subversion does not support non-UTF-8 '
@@ -423,6 +402,7 b' def issvnurl(ui, url):'
423 )
402 )
424 )
403 )
425 return False
404 return False
405
426 # Below, we approximate how Subversion checks the path. On Unix, we
406 # Below, we approximate how Subversion checks the path. On Unix, we
427 # should therefore convert the path to bytes using `fsencoding`
407 # should therefore convert the path to bytes using `fsencoding`
428 # (like Subversion does). On Windows, the right thing would
408 # (like Subversion does). On Windows, the right thing would
@@ -730,7 +710,7 b' class svn_source(converter_source):'
730 )
710 )
731 files = [
711 files = [
732 n
712 n
733 for n, e in pycompat.iteritems(entries)
713 for n, e in entries.items()
734 if e.kind == svn.core.svn_node_file
714 if e.kind == svn.core.svn_node_file
735 ]
715 ]
736 self.removed = set()
716 self.removed = set()
@@ -820,7 +800,7 b' class svn_source(converter_source):'
820 origpaths = []
800 origpaths = []
821 copies = [
801 copies = [
822 (e.copyfrom_path, e.copyfrom_rev, p)
802 (e.copyfrom_path, e.copyfrom_rev, p)
823 for p, e in pycompat.iteritems(origpaths)
803 for p, e in origpaths.items()
824 if e.copyfrom_path
804 if e.copyfrom_path
825 ]
805 ]
826 # Apply moves/copies from more specific to general
806 # Apply moves/copies from more specific to general
@@ -851,7 +831,7 b' class svn_source(converter_source):'
851 # be represented in mercurial.
831 # be represented in mercurial.
852 addeds = {
832 addeds = {
853 p: e.copyfrom_path
833 p: e.copyfrom_path
854 for p, e in pycompat.iteritems(origpaths)
834 for p, e in origpaths.items()
855 if e.action == b'A' and e.copyfrom_path
835 if e.action == b'A' and e.copyfrom_path
856 }
836 }
857 badroots = set()
837 badroots = set()
@@ -1140,7 +1120,7 b' class svn_source(converter_source):'
1140 parents = []
1120 parents = []
1141 # check whether this revision is the start of a branch or part
1121 # check whether this revision is the start of a branch or part
1142 # of a branch renaming
1122 # of a branch renaming
1143 orig_paths = sorted(pycompat.iteritems(orig_paths))
1123 orig_paths = sorted(orig_paths.items())
1144 root_paths = [
1124 root_paths = [
1145 (p, e) for p, e in orig_paths if self.module.startswith(p)
1125 (p, e) for p, e in orig_paths if self.module.startswith(p)
1146 ]
1126 ]
@@ -1302,7 +1282,7 b' class svn_source(converter_source):'
1302 path += b'/'
1282 path += b'/'
1303 return (
1283 return (
1304 (path + p)
1284 (path + p)
1305 for p, e in pycompat.iteritems(entries)
1285 for p, e in entries.items()
1306 if e.kind == svn.core.svn_node_file
1286 if e.kind == svn.core.svn_node_file
1307 )
1287 )
1308
1288
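The subversion and cvsps modules stop going through the util.pickle indirection and import the standard-library pickle directly; the alias presumably only existed to pick the right module across Python 2 and 3, so on a Python 3-only tree a plain import suffices. Illustrative round trip, with made-up log data:

    import pickle

    record = ({'trunk/file.c': 'A'}, 1200, 'alice', '2022-01-01', 'msg')  # invented entry
    blob = pickle.dumps(record, 2)        # explicit protocol, as the log-streaming helper uses
    assert pickle.loads(blob) == record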
@@ -16,7 +16,6 b''
16
16
17 # You should have received a copy of the GNU General Public License
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, see <http://www.gnu.org/licenses/>.
18 # along with this program; if not, see <http://www.gnu.org/licenses/>.
19 from __future__ import absolute_import
20
19
21 import svn.client
20 import svn.client
22 import svn.core
21 import svn.core
@@ -71,7 +70,7 b' class NotBranchError(SubversionException'
71 pass
70 pass
72
71
73
72
74 class SvnRaTransport(object):
73 class SvnRaTransport:
75 """
74 """
76 Open an ra connection to a Subversion repository.
75 Open an ra connection to a Subversion repository.
77 """
76 """
@@ -108,7 +107,7 b' class SvnRaTransport(object):'
108 self.ra = ra
107 self.ra = ra
109 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
108 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
110
109
111 class Reporter(object):
110 class Reporter:
112 def __init__(self, reporter_data):
111 def __init__(self, reporter_data):
113 self._reporter, self._baton = reporter_data
112 self._reporter, self._baton = reporter_data
114
113
@@ -91,7 +91,6 b' See :hg:`help patterns` for more informa'
91 used.
91 used.
92 """
92 """
93
93
94 from __future__ import absolute_import
95
94
96 import os
95 import os
97 import re
96 import re
@@ -186,7 +185,7 b' filters = {'
186 }
185 }
187
186
188
187
189 class eolfile(object):
188 class eolfile:
190 def __init__(self, ui, root, data):
189 def __init__(self, ui, root, data):
191 self._decode = {
190 self._decode = {
192 b'LF': b'to-lf',
191 b'LF': b'to-lf',
@@ -310,7 +309,7 b' def _checkhook(ui, repo, node, headsonly'
310 ensureenabled(ui)
309 ensureenabled(ui)
311 files = set()
310 files = set()
312 revs = set()
311 revs = set()
313 for rev in pycompat.xrange(repo[node].rev(), len(repo)):
312 for rev in range(repo[node].rev(), len(repo)):
314 revs.add(rev)
313 revs.add(rev)
315 if headsonly:
314 if headsonly:
316 ctx = repo[rev]
315 ctx = repo[rev]
@@ -379,7 +378,7 b' def reposetup(ui, repo):'
379
378
380 if not repo.local():
379 if not repo.local():
381 return
380 return
382 for name, fn in pycompat.iteritems(filters):
381 for name, fn in filters.items():
383 repo.adddatafilter(name, fn)
382 repo.adddatafilter(name, fn)
384
383
385 ui.setconfig(b'patch', b'eol', b'auto', b'eol')
384 ui.setconfig(b'patch', b'eol', b'auto', b'eol')
@@ -81,7 +81,6 b' needed files, so running the external di'
81 pretty fast (at least faster than having to compare the entire tree).
81 pretty fast (at least faster than having to compare the entire tree).
82 '''
82 '''
83
83
84 from __future__ import absolute_import
85
84
86 import os
85 import os
87 import re
86 import re
@@ -696,7 +695,7 b' def extdiff(ui, repo, *pats, **opts):'
696 return dodiff(ui, repo, cmdline, pats, opts)
695 return dodiff(ui, repo, cmdline, pats, opts)
697
696
698
697
699 class savedcmd(object):
698 class savedcmd:
700 """use external program to diff repository (or selected files)
699 """use external program to diff repository (or selected files)
701
700
702 Show differences between revisions for the specified files, using
701 Show differences between revisions for the specified files, using
@@ -45,7 +45,6 b' service entry controls the service name '
45
45
46 '''
46 '''
47
47
48 from __future__ import absolute_import
49
48
50 import os
49 import os
51 from mercurial.i18n import _
50 from mercurial.i18n import _
@@ -101,7 +101,6 b' annotate cache greatly. Run "debugbuildl'
101 #
101 #
102 # * format changes to the revmap file (maybe use length-encoding
102 # * format changes to the revmap file (maybe use length-encoding
103 # instead of null-terminated file paths at least?)
103 # instead of null-terminated file paths at least?)
104 from __future__ import absolute_import
105
104
106 from mercurial.i18n import _
105 from mercurial.i18n import _
107 from mercurial import (
106 from mercurial import (
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import os
9 import os
11
10
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import collections
9 import collections
11 import contextlib
10 import contextlib
@@ -76,7 +75,7 b' def _decorate(fctx):'
76 linecount = text.count(b'\n')
75 linecount = text.count(b'\n')
77 if text and not text.endswith(b'\n'):
76 if text and not text.endswith(b'\n'):
78 linecount += 1
77 linecount += 1
79 return ([(fctx, i) for i in pycompat.xrange(linecount)], text)
78 return ([(fctx, i) for i in range(linecount)], text)
80
79
81
80
82 # extracted from mercurial.context.basefilectx.annotate. slightly modified
81 # extracted from mercurial.context.basefilectx.annotate. slightly modified
@@ -160,7 +159,7 b' def hashdiffopts(diffopts):'
160 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)
159 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)
161
160
162
161
163 class annotateopts(object):
162 class annotateopts:
164 """like mercurial.mdiff.diffopts, but is for annotate
163 """like mercurial.mdiff.diffopts, but is for annotate
165
164
166 followrename: follow renames, like "hg annotate -f"
165 followrename: follow renames, like "hg annotate -f"
@@ -175,7 +174,7 b' class annotateopts(object):'
175
174
176 def __init__(self, **opts):
175 def __init__(self, **opts):
177 opts = pycompat.byteskwargs(opts)
176 opts = pycompat.byteskwargs(opts)
178 for k, v in pycompat.iteritems(self.defaults):
177 for k, v in self.defaults.items():
179 setattr(self, k, opts.get(k, v))
178 setattr(self, k, opts.get(k, v))
180
179
181 @util.propertycache
180 @util.propertycache
@@ -197,7 +196,7 b' class annotateopts(object):'
197 defaultopts = annotateopts()
196 defaultopts = annotateopts()
198
197
199
198
200 class _annotatecontext(object):
199 class _annotatecontext:
201 """do not use this class directly as it does not use lock to protect
200 """do not use this class directly as it does not use lock to protect
202 writes. use "with annotatecontext(...)" instead.
201 writes. use "with annotatecontext(...)" instead.
203 """
202 """
@@ -578,13 +577,13 b' class _annotatecontext(object):'
578 result = [None] * len(annotateresult)
577 result = [None] * len(annotateresult)
579 # {(rev, linenum): [lineindex]}
578 # {(rev, linenum): [lineindex]}
580 key2idxs = collections.defaultdict(list)
579 key2idxs = collections.defaultdict(list)
581 for i in pycompat.xrange(len(result)):
580 for i in range(len(result)):
582 key2idxs[(revs[i], annotateresult[i][1])].append(i)
581 key2idxs[(revs[i], annotateresult[i][1])].append(i)
583 while key2idxs:
582 while key2idxs:
584 # find an unresolved line and its linelog rev to annotate
583 # find an unresolved line and its linelog rev to annotate
585 hsh = None
584 hsh = None
586 try:
585 try:
587 for (rev, _linenum), idxs in pycompat.iteritems(key2idxs):
586 for (rev, _linenum), idxs in key2idxs.items():
588 if revmap.rev2flag(rev) & revmapmod.sidebranchflag:
587 if revmap.rev2flag(rev) & revmapmod.sidebranchflag:
589 continue
588 continue
590 hsh = annotateresult[idxs[0]][0]
589 hsh = annotateresult[idxs[0]][0]
@@ -595,7 +594,7 b' class _annotatecontext(object):'
595 # the remaining key2idxs are not in main branch, resolving them
594 # the remaining key2idxs are not in main branch, resolving them
596 # using the hard way...
595 # using the hard way...
597 revlines = {}
596 revlines = {}
598 for (rev, linenum), idxs in pycompat.iteritems(key2idxs):
597 for (rev, linenum), idxs in key2idxs.items():
599 if rev not in revlines:
598 if rev not in revlines:
600 hsh = annotateresult[idxs[0]][0]
599 hsh = annotateresult[idxs[0]][0]
601 if self.ui.debugflag:
600 if self.ui.debugflag:
@@ -784,7 +783,7 b' def _unlinkpaths(paths):'
784 pass
783 pass
785
784
786
785
787 class pathhelper(object):
786 class pathhelper:
788 """helper for getting paths for lockfile, linelog and revmap"""
787 """helper for getting paths for lockfile, linelog and revmap"""
789
788
790 def __init__(self, repo, path, opts=defaultopts):
789 def __init__(self, repo, path, opts=defaultopts):
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9
8
10 class CorruptedFileError(Exception):
9 class CorruptedFileError(Exception):
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 from mercurial.node import (
8 from mercurial.node import (
10 hex,
9 hex,
@@ -20,7 +19,7 b' from mercurial.utils import dateutil'
20
19
21 # imitating mercurial.commands.annotate, not using the vanilla formatter since
20 # imitating mercurial.commands.annotate, not using the vanilla formatter since
22 # the data structures are a bit different, and we have some fast paths.
21 # the data structures are a bit different, and we have some fast paths.
23 class defaultformatter(object):
22 class defaultformatter:
24 """the default formatter that does leftpad and support some common flags"""
23 """the default formatter that does leftpad and support some common flags"""
25
24
26 def __init__(self, ui, repo, opts):
25 def __init__(self, ui, repo, opts):
@@ -94,7 +93,7 b' class defaultformatter(object):'
94
93
95 # buffered output
94 # buffered output
96 result = b''
95 result = b''
97 for i in pycompat.xrange(len(annotatedresult)):
96 for i in range(len(annotatedresult)):
98 for j, p in enumerate(pieces):
97 for j, p in enumerate(pieces):
99 sep = self.funcmap[j][1]
98 sep = self.funcmap[j][1]
100 padding = b' ' * (maxwidths[j] - len(p[i]))
99 padding = b' ' * (maxwidths[j] - len(p[i]))
@@ -149,7 +148,7 b' class jsonformatter(defaultformatter):'
149
148
150 result = b''
149 result = b''
151 lasti = len(annotatedresult) - 1
150 lasti = len(annotatedresult) - 1
152 for i in pycompat.xrange(len(annotatedresult)):
151 for i in range(len(annotatedresult)):
153 result += b'\n {\n'
152 result += b'\n {\n'
154 for j, p in enumerate(pieces):
153 for j, p in enumerate(pieces):
155 k, vs = p
154 k, vs = p
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import contextlib
8 import contextlib
10 import os
9 import os
@@ -15,7 +14,6 b' from mercurial import ('
15 error,
14 error,
16 extensions,
15 extensions,
17 hg,
16 hg,
18 pycompat,
19 util,
17 util,
20 wireprotov1peer,
18 wireprotov1peer,
21 wireprotov1server,
19 wireprotov1server,
@@ -190,7 +188,7 b' def clientfetch(repo, paths, lastnodemap'
190 for result in results:
188 for result in results:
191 r = result.result()
189 r = result.result()
192 # TODO: pconvert these paths on the server?
190 # TODO: pconvert these paths on the server?
193 r = {util.pconvert(p): v for p, v in pycompat.iteritems(r)}
191 r = {util.pconvert(p): v for p, v in r.items()}
194 for path in sorted(r):
192 for path in sorted(r):
195 # ignore malicious paths
193 # ignore malicious paths
196 if not path.startswith(b'fastannotate/') or b'/../' in (
194 if not path.startswith(b'fastannotate/') or b'/../' in (
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import bisect
9 import bisect
11 import io
10 import io
@@ -16,7 +15,6 b' from mercurial.node import hex'
16 from mercurial.pycompat import open
15 from mercurial.pycompat import open
17 from mercurial import (
16 from mercurial import (
18 error as hgerror,
17 error as hgerror,
19 pycompat,
20 )
18 )
21 from . import error
19 from . import error
22
20
@@ -49,7 +47,7 b' renameflag = 2'
49 _hshlen = 20
47 _hshlen = 20
50
48
51
49
52 class revmap(object):
50 class revmap:
53 """trivial hg bin hash - linelog rev bidirectional map
51 """trivial hg bin hash - linelog rev bidirectional map
54
52
55 also stores a flag (uint8) for each revision, and track renames.
53 also stores a flag (uint8) for each revision, and track renames.
@@ -166,13 +164,11 b' class revmap(object):'
166 if self._lastmaxrev == -1: # write the entire file
164 if self._lastmaxrev == -1: # write the entire file
167 with open(self.path, b'wb') as f:
165 with open(self.path, b'wb') as f:
168 f.write(self.HEADER)
166 f.write(self.HEADER)
169 for i in pycompat.xrange(1, len(self._rev2hsh)):
167 for i in range(1, len(self._rev2hsh)):
170 self._writerev(i, f)
168 self._writerev(i, f)
171 else: # append incrementally
169 else: # append incrementally
172 with open(self.path, b'ab') as f:
170 with open(self.path, b'ab') as f:
173 for i in pycompat.xrange(
171 for i in range(self._lastmaxrev + 1, len(self._rev2hsh)):
174 self._lastmaxrev + 1, len(self._rev2hsh)
175 ):
176 self._writerev(i, f)
172 self._writerev(i, f)
177 self._lastmaxrev = self.maxrev
173 self._lastmaxrev = self.maxrev
178
174
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from mercurial.pycompat import getattr
9 from mercurial.pycompat import getattr
11 from mercurial import (
10 from mercurial import (
@@ -23,7 +22,7 b' from . import ('
23 )
22 )
24
23
25
24
26 class _lazyfctx(object):
25 class _lazyfctx:
27 """delegates to fctx but do not construct fctx when unnecessary"""
26 """delegates to fctx but do not construct fctx when unnecessary"""
28
27
29 def __init__(self, repo, node, path):
28 def __init__(self, repo, node, path):
@@ -7,7 +7,6 b''
7 # The format specification for fast-import streams can be found at
7 # The format specification for fast-import streams can be found at
8 # https://git-scm.com/docs/git-fast-import#_input_format
8 # https://git-scm.com/docs/git-fast-import#_input_format
9
9
10 from __future__ import absolute_import
11 import re
10 import re
12
11
13 from mercurial.i18n import _
12 from mercurial.i18n import _
@@ -7,7 +7,6 b''
7
7
8 '''pull, update and merge in one command (DEPRECATED)'''
8 '''pull, update and merge in one command (DEPRECATED)'''
9
9
10 from __future__ import absolute_import
11
10
12 from mercurial.i18n import _
11 from mercurial.i18n import _
13 from mercurial.node import short
12 from mercurial.node import short
@@ -122,7 +122,6 b' amended into the revision being fixed; f'
122 file content back to stdout as documented above.
122 file content back to stdout as documented above.
123 """
123 """
124
124
125 from __future__ import absolute_import
126
125
127 import collections
126 import collections
128 import itertools
127 import itertools
@@ -378,9 +377,7 b' def cleanup(repo, replacements, wdirwrit'
378 Useful as a hook point for extending "hg fix" with output summarizing the
377 Useful as a hook point for extending "hg fix" with output summarizing the
379 effects of the command, though we choose not to output anything here.
378 effects of the command, though we choose not to output anything here.
380 """
379 """
381 replacements = {
380 replacements = {prec: [succ] for prec, succ in replacements.items()}
382 prec: [succ] for prec, succ in pycompat.iteritems(replacements)
383 }
384 scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
381 scmutil.cleanupnodes(repo, replacements, b'fix', fixphase=True)
385
382
386
383
@@ -693,7 +690,7 b' def fixfile(ui, repo, opts, fixers, fixc'
693 """
690 """
694 metadata = {}
691 metadata = {}
695 newdata = fixctx[path].data()
692 newdata = fixctx[path].data()
696 for fixername, fixer in pycompat.iteritems(fixers):
693 for fixername, fixer in fixers.items():
697 if fixer.affects(opts, fixctx, path):
694 if fixer.affects(opts, fixctx, path):
698 ranges = lineranges(
695 ranges = lineranges(
699 opts, path, basepaths, basectxs, fixctx, newdata
696 opts, path, basepaths, basectxs, fixctx, newdata
@@ -771,7 +768,7 b' def writeworkingdir(repo, ctx, filedata,'
771
768
772 Directly updates the dirstate for the affected files.
769 Directly updates the dirstate for the affected files.
773 """
770 """
774 for path, data in pycompat.iteritems(filedata):
771 for path, data in filedata.items():
775 fctx = ctx[path]
772 fctx = ctx[path]
776 fctx.write(data, fctx.flags())
773 fctx.write(data, fctx.flags())
777
774
@@ -906,7 +903,7 b' def fixernames(ui):'
906 return names
903 return names
907
904
908
905
909 class Fixer(object):
906 class Fixer:
910 """Wraps the raw config values for a fixer with methods"""
907 """Wraps the raw config values for a fixer with methods"""
911
908
912 def __init__(
909 def __init__(
@@ -107,7 +107,6 b' created.'
107 # The issues related to nested repos and subrepos are probably not fundamental
107 # The issues related to nested repos and subrepos are probably not fundamental
108 # ones. Patches to fix them are welcome.
108 # ones. Patches to fix them are welcome.
109
109
110 from __future__ import absolute_import
111
110
112 import codecs
111 import codecs
113 import os
112 import os
@@ -336,7 +335,7 b' def overridewalk(orig, self, match, subr'
336 nonnormalset = {
335 nonnormalset = {
337 f
336 f
338 for f, e in self._map.items()
337 for f, e in self._map.items()
339 if e.v1_state() != b"n" or e.v1_mtime() == -1
338 if e._v1_state() != b"n" or e._v1_mtime() == -1
340 }
339 }
341
340
342 copymap = self._map.copymap
341 copymap = self._map.copymap
@@ -502,15 +501,11 b' def overridewalk(orig, self, match, subr'
502 visit.update(f for f in copymap if f not in results and matchfn(f))
501 visit.update(f for f in copymap if f not in results and matchfn(f))
503 else:
502 else:
504 if matchalways:
503 if matchalways:
505 visit.update(
504 visit.update(f for f, st in dmap.items() if f not in results)
506 f for f, st in pycompat.iteritems(dmap) if f not in results
507 )
508 visit.update(f for f in copymap if f not in results)
505 visit.update(f for f in copymap if f not in results)
509 else:
506 else:
510 visit.update(
507 visit.update(
511 f
508 f for f, st in dmap.items() if f not in results and matchfn(f)
512 for f, st in pycompat.iteritems(dmap)
513 if f not in results and matchfn(f)
514 )
509 )
515 visit.update(f for f in copymap if f not in results and matchfn(f))
510 visit.update(f for f in copymap if f not in results and matchfn(f))
516
511
@@ -686,7 +681,7 b' def overridestatus('
686 )
681 )
687
682
688
683
689 class poststatus(object):
684 class poststatus:
690 def __init__(self, startclock):
685 def __init__(self, startclock):
691 self._startclock = pycompat.sysbytes(startclock)
686 self._startclock = pycompat.sysbytes(startclock)
692
687
@@ -761,7 +756,7 b' def wrapsymlink(orig, source, link_name)'
761 pass
756 pass
762
757
763
758
764 class state_update(object):
759 class state_update:
765 """This context manager is responsible for dispatching the state-enter
760 """This context manager is responsible for dispatching the state-enter
766 and state-leave signals to the watchman service. The enter and leave
761 and state-leave signals to the watchman service. The enter and leave
767 methods can be invoked manually (for scenarios where context manager
762 methods can be invoked manually (for scenarios where context manager
@@ -27,7 +27,6 b''
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
28
29 # no unicode literals
29 # no unicode literals
30 from __future__ import absolute_import, division, print_function
31
30
32 import inspect
31 import inspect
33 import math
32 import math
@@ -302,7 +301,7 b' class CommandError(WatchmanError):'
302 )
301 )
303
302
304
303
305 class Transport(object):
304 class Transport:
306 """communication transport to the watchman server"""
305 """communication transport to the watchman server"""
307
306
308 buf = None
307 buf = None
@@ -347,7 +346,7 b' class Transport(object):'
347 self.buf.append(b)
346 self.buf.append(b)
348
347
349
348
350 class Codec(object):
349 class Codec:
351 """communication encoding for the watchman server"""
350 """communication encoding for the watchman server"""
352
351
353 transport = None
352 transport = None
@@ -860,7 +859,7 b' class JsonCodec(Codec):'
860 self.transport.write(cmd + b"\n")
859 self.transport.write(cmd + b"\n")
861
860
862
861
863 class client(object):
862 class client:
864 """Handles the communication with the watchman service"""
863 """Handles the communication with the watchman service"""
865
864
866 sockpath = None
865 sockpath = None
@@ -27,7 +27,6 b''
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
28
29 # no unicode literals
29 # no unicode literals
30 from __future__ import absolute_import, division, print_function
31
30
32
31
33 def parse_version(vstr):
32 def parse_version(vstr):
@@ -27,7 +27,6 b''
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
28
29 # no unicode literals
29 # no unicode literals
30 from __future__ import absolute_import, division, print_function
31
30
32 import sys
31 import sys
33
32
@@ -27,7 +27,6 b''
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
28
29 # no unicode literals
29 # no unicode literals
30 from __future__ import absolute_import, division, print_function
31
30
32 import sys
31 import sys
33
32
@@ -27,7 +27,6 b''
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
28
29 # no unicode literals
29 # no unicode literals
30 from __future__ import absolute_import, division, print_function
31
30
32 import ctypes
31 import ctypes
33
32
@@ -27,7 +27,6 b''
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
28
29 # no unicode literals
29 # no unicode literals
30 from __future__ import absolute_import, division, print_function
31
30
32 import binascii
31 import binascii
33 import collections
32 import collections
@@ -94,7 +93,7 b' def _buf_pos(buf, pos):'
94 return ret
93 return ret
95
94
96
95
97 class _bser_buffer(object):
96 class _bser_buffer:
98 def __init__(self, version):
97 def __init__(self, version):
99 self.bser_version = version
98 self.bser_version = version
100 self.buf = ctypes.create_string_buffer(8192)
99 self.buf = ctypes.create_string_buffer(8192)
@@ -325,7 +324,7 b' def dumps(obj, version=1, capabilities=0'
325 # This is a quack-alike with the bserObjectType in bser.c
324 # This is a quack-alike with the bserObjectType in bser.c
326 # It provides by getattr accessors and getitem for both index
325 # It provides by getattr accessors and getitem for both index
327 # and name.
326 # and name.
328 class _BunserDict(object):
327 class _BunserDict:
329 __slots__ = ("_keys", "_values")
328 __slots__ = ("_keys", "_values")
330
329
331 def __init__(self, keys, values):
330 def __init__(self, keys, values):
@@ -351,7 +350,7 b' class _BunserDict(object):'
351 return len(self._keys)
350 return len(self._keys)
352
351
353
352
354 class Bunser(object):
353 class Bunser:
355 def __init__(self, mutable=True, value_encoding=None, value_errors=None):
354 def __init__(self, mutable=True, value_encoding=None, value_errors=None):
356 self.mutable = mutable
355 self.mutable = mutable
357 self.value_encoding = value_encoding
356 self.value_encoding = value_encoding
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import errno
9 import errno
11 import os
10 import os
@@ -23,7 +22,7 b' from mercurial import ('
23 _versionformat = b">I"
22 _versionformat = b">I"
24
23
25
24
26 class state(object):
25 class state:
27 def __init__(self, repo):
26 def __init__(self, repo):
28 self._vfs = repo.vfs
27 self._vfs = repo.vfs
29 self._ui = repo.ui
28 self._ui = repo.ui
@@ -138,9 +137,8 b' class state(object):'
138 def invalidate(self):
137 def invalidate(self):
139 try:
138 try:
140 os.unlink(os.path.join(self._rootdir, b'.hg', b'fsmonitor.state'))
139 os.unlink(os.path.join(self._rootdir, b'.hg', b'fsmonitor.state'))
141 except OSError as inst:
140 except FileNotFoundError:
142 if inst.errno != errno.ENOENT:
141 pass
143 raise
144 self._identity = util.filestat(None)
142 self._identity = util.filestat(None)
145
143
146 def setlastclock(self, clock):
144 def setlastclock(self, clock):
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import getpass
9 import getpass
11
10
@@ -44,7 +43,7 b' class WatchmanNoRoot(Unavailable):'
44 super(WatchmanNoRoot, self).__init__(msg)
43 super(WatchmanNoRoot, self).__init__(msg)
45
44
46
45
47 class client(object):
46 class client:
48 def __init__(self, ui, root, timeout=1.0):
47 def __init__(self, ui, root, timeout=1.0):
49 err = None
48 err = None
50 if not self._user:
49 if not self._user:
@@ -4,7 +4,6 b' This is currently super experimental. It'
4 firstborn a la Rumpelstiltskin, etc.
4 firstborn a la Rumpelstiltskin, etc.
5 """
5 """
6
6
7 from __future__ import absolute_import
8
7
9 import os
8 import os
10
9
@@ -17,6 +16,7 b' from mercurial import ('
17 localrepo,
16 localrepo,
18 pycompat,
17 pycompat,
19 registrar,
18 registrar,
19 requirements as requirementsmod,
20 scmutil,
20 scmutil,
21 store,
21 store,
22 util,
22 util,
@@ -48,7 +48,7 b' getversion = gitutil.pygit2_version'
48
48
49
49
50 # TODO: extract an interface for this in core
50 # TODO: extract an interface for this in core
51 class gitstore(object): # store.basicstore):
51 class gitstore: # store.basicstore):
52 def __init__(self, path, vfstype):
52 def __init__(self, path, vfstype):
53 self.vfs = vfstype(path)
53 self.vfs = vfstype(path)
54 self.opener = self.vfs
54 self.opener = self.vfs
@@ -130,7 +130,7 b' def _makestore(orig, requirements, store'
130 return orig(requirements, storebasepath, vfstype)
130 return orig(requirements, storebasepath, vfstype)
131
131
132
132
133 class gitfilestorage(object):
133 class gitfilestorage:
134 def file(self, path):
134 def file(self, path):
135 if path[0:1] == b'/':
135 if path[0:1] == b'/':
136 path = path[1:]
136 path = path[1:]
@@ -162,7 +162,7 b' def _setupdothg(ui, path):'
162 _BMS_PREFIX = 'refs/heads/'
162 _BMS_PREFIX = 'refs/heads/'
163
163
164
164
165 class gitbmstore(object):
165 class gitbmstore:
166 def __init__(self, gitrepo):
166 def __init__(self, gitrepo):
167 self.gitrepo = gitrepo
167 self.gitrepo = gitrepo
168 self._aclean = True
168 self._aclean = True
@@ -301,9 +301,15 b' def reposetup(ui, repo):'
301
301
302 class gitlocalrepo(orig):
302 class gitlocalrepo(orig):
303 def _makedirstate(self):
303 def _makedirstate(self):
304 v2_req = requirementsmod.DIRSTATE_V2_REQUIREMENT
305 use_dirstate_v2 = v2_req in self.requirements
306
304 # TODO narrow support here
307 # TODO narrow support here
305 return dirstate.gitdirstate(
308 return dirstate.gitdirstate(
306 self.ui, self.vfs.base, self.store.git
309 self.ui,
310 self.vfs,
311 self.store.git,
312 use_dirstate_v2,
307 )
313 )
308
314
309 def commit(self, *args, **kwargs):
315 def commit(self, *args, **kwargs):
@@ -1,11 +1,9 b''
1 from __future__ import absolute_import
2
3 import contextlib
1 import contextlib
4 import errno
5 import os
2 import os
6
3
7 from mercurial.node import sha1nodeconstants
4 from mercurial.node import sha1nodeconstants
8 from mercurial import (
5 from mercurial import (
6 dirstatemap,
9 error,
7 error,
10 extensions,
8 extensions,
11 match as matchmod,
9 match as matchmod,
@@ -13,6 +11,9 b' from mercurial import ('
13 scmutil,
11 scmutil,
14 util,
12 util,
15 )
13 )
14 from mercurial.dirstateutils import (
15 timestamp,
16 )
16 from mercurial.interfaces import (
17 from mercurial.interfaces import (
17 dirstate as intdirstate,
18 dirstate as intdirstate,
18 util as interfaceutil,
19 util as interfaceutil,
@@ -20,6 +21,9 b' from mercurial.interfaces import ('
20
21
21 from . import gitutil
22 from . import gitutil
22
23
24
25 DirstateItem = dirstatemap.DirstateItem
26 propertycache = util.propertycache
23 pygit2 = gitutil.get_pygit2()
27 pygit2 = gitutil.get_pygit2()
24
28
25
29
@@ -28,7 +32,7 b' def readpatternfile(orig, filepath, warn'
28 return orig(filepath, warn, sourceinfo=False)
32 return orig(filepath, warn, sourceinfo=False)
29 result = []
33 result = []
30 warnings = []
34 warnings = []
31 with open(filepath, b'rb') as fp:
35 with open(filepath, 'rb') as fp:
32 for l in fp:
36 for l in fp:
33 l = l.strip()
37 l = l.strip()
34 if not l or l.startswith(b'#'):
38 if not l or l.startswith(b'#'):
@@ -68,14 +72,29 b' if pygit2:'
68
72
69
73
70 @interfaceutil.implementer(intdirstate.idirstate)
74 @interfaceutil.implementer(intdirstate.idirstate)
71 class gitdirstate(object):
75 class gitdirstate:
72 def __init__(self, ui, root, gitrepo):
76 def __init__(self, ui, vfs, gitrepo, use_dirstate_v2):
73 self._ui = ui
77 self._ui = ui
74 self._root = os.path.dirname(root)
78 self._root = os.path.dirname(vfs.base)
79 self._opener = vfs
75 self.git = gitrepo
80 self.git = gitrepo
76 self._plchangecallbacks = {}
81 self._plchangecallbacks = {}
77 # TODO: context.poststatusfixup is bad and uses this attribute
82 # TODO: context.poststatusfixup is bad and uses this attribute
78 self._dirty = False
83 self._dirty = False
84 self._mapcls = dirstatemap.dirstatemap
85 self._use_dirstate_v2 = use_dirstate_v2
86
87 @propertycache
88 def _map(self):
89 """Return the dirstate contents (see documentation for dirstatemap)."""
90 self._map = self._mapcls(
91 self._ui,
92 self._opener,
93 self._root,
94 sha1nodeconstants,
95 self._use_dirstate_v2,
96 )
97 return self._map
79
98
80 def p1(self):
99 def p1(self):
81 try:
100 try:
@@ -144,6 +163,13 b' class gitdirstate(object):'
144 [],
163 [],
145 [],
164 [],
146 )
165 )
166
167 try:
168 mtime_boundary = timestamp.get_fs_now(self._opener)
169 except OSError:
170 # In largefiles or readonly context
171 mtime_boundary = None
172
147 gstatus = self.git.status()
173 gstatus = self.git.status()
148 for path, status in gstatus.items():
174 for path, status in gstatus.items():
149 path = pycompat.fsencode(path)
175 path = pycompat.fsencode(path)
@@ -195,6 +221,7 b' class gitdirstate(object):'
195 scmutil.status(
221 scmutil.status(
196 modified, added, removed, deleted, unknown, ignored, clean
222 modified, added, removed, deleted, unknown, ignored, clean
197 ),
223 ),
224 mtime_boundary,
198 )
225 )
199
226
200 def flagfunc(self, buildfallback):
227 def flagfunc(self, buildfallback):
@@ -207,6 +234,13 b' class gitdirstate(object):'
207 os.path.dirname(pycompat.fsencode(self.git.path))
234 os.path.dirname(pycompat.fsencode(self.git.path))
208 )
235 )
209
236
237 def get_entry(self, path):
238 """return a DirstateItem for the associated path"""
239 entry = self._map.get(path)
240 if entry is None:
241 return DirstateItem()
242 return entry
243
210 def normalize(self, path):
244 def normalize(self, path):
211 normed = util.normcase(path)
245 normed = util.normcase(path)
212 assert normed == path, b"TODO handling of case folding: %s != %s" % (
246 assert normed == path, b"TODO handling of case folding: %s != %s" % (
@@ -283,9 +317,7 b' class gitdirstate(object):'
283 # TODO construct the stat info from the status object?
317 # TODO construct the stat info from the status object?
284 try:
318 try:
285 s = os.stat(os.path.join(cwd, path))
319 s = os.stat(os.path.join(cwd, path))
286 except OSError as e:
320 except FileNotFoundError:
287 if e.errno != errno.ENOENT:
288 raise
289 continue
321 continue
290 r[path] = s
322 r[path] = s
291 return r
323 return r
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 from mercurial.i18n import _
1 from mercurial.i18n import _
4
2
5 from mercurial.node import (
3 from mercurial.node import (
@@ -31,7 +29,7 b' from . import ('
31 pygit2 = gitutil.get_pygit2()
29 pygit2 = gitutil.get_pygit2()
32
30
33
31
34 class baselog(object): # revlog.revlog):
32 class baselog: # revlog.revlog):
35 """Common implementations between changelog and manifestlog."""
33 """Common implementations between changelog and manifestlog."""
36
34
37 def __init__(self, gr, db):
35 def __init__(self, gr, db):
@@ -71,7 +69,7 b' class baselog(object): # revlog.revlog)'
71 return t is not None
69 return t is not None
72
70
73
71
74 class baselogindex(object):
72 class baselogindex:
75 def __init__(self, log):
73 def __init__(self, log):
76 self._log = log
74 self._log = log
77
75
@@ -114,7 +112,7 b' class changelog(baselog):'
114 return False
112 return False
115
113
116 def __iter__(self):
114 def __iter__(self):
117 return iter(pycompat.xrange(len(self)))
115 return iter(range(len(self)))
118
116
119 @property
117 @property
120 def filteredrevs(self):
118 def filteredrevs(self):
@@ -188,7 +186,7 b' class changelog(baselog):'
188
186
189 def shortest(self, node, minlength=1):
187 def shortest(self, node, minlength=1):
190 nodehex = hex(node)
188 nodehex = hex(node)
191 for attempt in pycompat.xrange(minlength, len(nodehex) + 1):
189 for attempt in range(minlength, len(nodehex) + 1):
192 candidate = nodehex[:attempt]
190 candidate = nodehex[:attempt]
193 matches = int(
191 matches = int(
194 self._db.execute(
192 self._db.execute(
@@ -536,8 +534,7 b' WHERE filenode = ? AND filename = ?'
536 ).fetchone()[0]
534 ).fetchone()[0]
537 # This filelog is missing some data. Build the
535 # This filelog is missing some data. Build the
538 # filelog, then recurse (which will always find data).
536 # filelog, then recurse (which will always find data).
539 if pycompat.ispy3:
537 commit = commit.decode('ascii')
540 commit = commit.decode('ascii')
541 index.fill_in_filelog(self.gitrepo, self._db, commit, gp, gn)
538 index.fill_in_filelog(self.gitrepo, self._db, commit, gp, gn)
542 return self.parents(node)
539 return self.parents(node)
543 else:
540 else:
@@ -1,9 +1,6 b''
1 """utilities to assist in working with pygit2"""
1 """utilities to assist in working with pygit2"""
2 from __future__ import absolute_import
3
2
4 from mercurial.node import bin, hex, sha1nodeconstants
3 from mercurial.node import bin, sha1nodeconstants
5
6 from mercurial import pycompat
7
4
8 pygit2_module = None
5 pygit2_module = None
9
6
@@ -39,14 +36,12 b' def togitnode(n):'
39 pygit2 and sqlite both need nodes as strings, not bytes.
36 pygit2 and sqlite both need nodes as strings, not bytes.
40 """
37 """
41 assert len(n) == 20
38 assert len(n) == 20
42 return pycompat.sysstr(hex(n))
39 return n.hex()
43
40
44
41
45 def fromgitnode(n):
42 def fromgitnode(n):
46 """Opposite of togitnode."""
43 """Opposite of togitnode."""
47 assert len(n) == 40
44 assert len(n) == 40
48 if pycompat.ispy3:
49 return bin(n.encode('ascii'))
50 return bin(n)
45 return bin(n)
51
46
52
47
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import collections
1 import collections
4 import os
2 import os
5 import sqlite3
3 import sqlite3
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 from mercurial import (
1 from mercurial import (
4 match as matchmod,
2 match as matchmod,
5 pathutil,
3 pathutil,
@@ -17,7 +15,7 b' pygit2 = gitutil.get_pygit2()'
17
15
18
16
19 @interfaceutil.implementer(repository.imanifestdict)
17 @interfaceutil.implementer(repository.imanifestdict)
20 class gittreemanifest(object):
18 class gittreemanifest:
21 """Expose git trees (and optionally a builder's overlay) as a manifestdict.
19 """Expose git trees (and optionally a builder's overlay) as a manifestdict.
22
20
23 Very similar to mercurial.manifest.treemanifest.
21 Very similar to mercurial.manifest.treemanifest.
@@ -260,7 +258,7 b' class gittreemanifest(object):'
260
258
261
259
262 @interfaceutil.implementer(repository.imanifestrevisionstored)
260 @interfaceutil.implementer(repository.imanifestrevisionstored)
263 class gittreemanifestctx(object):
261 class gittreemanifestctx:
264 def __init__(self, repo, gittree):
262 def __init__(self, repo, gittree):
265 self._repo = repo
263 self._repo = repo
266 self._tree = gittree
264 self._tree = gittree
@@ -281,7 +279,7 b' class gittreemanifestctx(object):'
281
279
282
280
283 @interfaceutil.implementer(repository.imanifestrevisionwritable)
281 @interfaceutil.implementer(repository.imanifestrevisionwritable)
284 class memgittreemanifestctx(object):
282 class memgittreemanifestctx:
285 def __init__(self, repo, tree):
283 def __init__(self, repo, tree):
286 self._repo = repo
284 self._repo = repo
287 self._tree = tree
285 self._tree = tree
@@ -15,7 +15,6 b' If an unknown command or parameter combi'
15 produced.
15 produced.
16 """
16 """
17
17
18 from __future__ import absolute_import
19
18
20 import getopt
19 import getopt
21 import re
20 import re
@@ -116,14 +115,14 b' def parseoptions(ui, cmdoptions, args):'
116 opts = dict(
115 opts = dict(
117 [
116 [
118 (k, convert(v)) if isinstance(v, bytes) else (k, v)
117 (k, convert(v)) if isinstance(v, bytes) else (k, v)
119 for k, v in pycompat.iteritems(opts)
118 for k, v in opts.items()
120 ]
119 ]
121 )
120 )
122
121
123 return args, opts
122 return args, opts
124
123
125
124
126 class Command(object):
125 class Command:
127 def __init__(self, name):
126 def __init__(self, name):
128 self.name = name
127 self.name = name
129 self.args = []
128 self.args = []
@@ -132,7 +131,7 b' class Command(object):'
132 def __bytes__(self):
131 def __bytes__(self):
133 cmd = b"hg " + self.name
132 cmd = b"hg " + self.name
134 if self.opts:
133 if self.opts:
135 for k, values in sorted(pycompat.iteritems(self.opts)):
134 for k, values in sorted(self.opts.items()):
136 for v in values:
135 for v in values:
137 if v:
136 if v:
138 if isinstance(v, int):
137 if isinstance(v, int):
@@ -164,7 +163,7 b' class Command(object):'
164 return AndCommand(self, other)
163 return AndCommand(self, other)
165
164
166
165
167 class AndCommand(object):
166 class AndCommand:
168 def __init__(self, left, right):
167 def __init__(self, left, right):
169 self.left = left
168 self.left = left
170 self.right = right
169 self.right = right
@@ -5,7 +5,6 b''
5
5
6 '''commands to sign and verify changesets'''
6 '''commands to sign and verify changesets'''
7
7
8 from __future__ import absolute_import
9
8
10 import binascii
9 import binascii
11 import os
10 import os
@@ -65,7 +64,7 b' help.CATEGORY_ORDER.insert('
65 help.CATEGORY_NAMES[_HELP_CATEGORY] = b'Signing changes (GPG)'
64 help.CATEGORY_NAMES[_HELP_CATEGORY] = b'Signing changes (GPG)'
66
65
67
66
68 class gpg(object):
67 class gpg:
69 def __init__(self, path, key=None):
68 def __init__(self, path, key=None):
70 self.path = path
69 self.path = path
71 self.key = (key and b" --local-user \"%s\"" % key) or b""
70 self.key = (key and b" --local-user \"%s\"" % key) or b""
@@ -15,7 +15,6 b' commands. When this options is given, an'
15 revision graph is also shown.
15 revision graph is also shown.
16 '''
16 '''
17
17
18 from __future__ import absolute_import
19
18
20 from mercurial.i18n import _
19 from mercurial.i18n import _
21 from mercurial import (
20 from mercurial import (
@@ -34,7 +34,6 b' Revisions context menu will now display '
34 vdiff on hovered and selected revisions.
34 vdiff on hovered and selected revisions.
35 '''
35 '''
36
36
37 from __future__ import absolute_import
38
37
39 import os
38 import os
40
39
@@ -246,7 +245,7 b' def revtree(ui, args, repo, full=b"tree"'
246 else:
245 else:
247 i -= chunk
246 i -= chunk
248
247
249 for x in pycompat.xrange(chunk):
248 for x in range(chunk):
250 if i + x >= count:
249 if i + x >= count:
251 l[chunk - x :] = [0] * (chunk - x)
250 l[chunk - x :] = [0] * (chunk - x)
252 break
251 break
@@ -257,7 +256,7 b' def revtree(ui, args, repo, full=b"tree"'
257 else:
256 else:
258 if (i + x) in repo:
257 if (i + x) in repo:
259 l[x] = 1
258 l[x] = 1
260 for x in pycompat.xrange(chunk - 1, -1, -1):
259 for x in range(chunk - 1, -1, -1):
261 if l[x] != 0:
260 if l[x] != 0:
262 yield (i + x, full is not None and l[x] or None)
261 yield (i + x, full is not None and l[x] or None)
263 if i == 0:
262 if i == 0:
@@ -268,7 +267,7 b' def revtree(ui, args, repo, full=b"tree"'
268 if len(ar) == 0:
267 if len(ar) == 0:
269 return 1
268 return 1
270 mask = 0
269 mask = 0
271 for i in pycompat.xrange(len(ar)):
270 for i in range(len(ar)):
272 if sha in reachable[i]:
271 if sha in reachable[i]:
273 mask |= 1 << i
272 mask |= 1 << i
274
273
@@ -377,9 +376,7 b' def view(ui, repo, *etc, **opts):'
377 """start interactive history viewer"""
376 """start interactive history viewer"""
378 opts = pycompat.byteskwargs(opts)
377 opts = pycompat.byteskwargs(opts)
379 os.chdir(repo.root)
378 os.chdir(repo.root)
380 optstr = b' '.join(
379 optstr = b' '.join([b'--%s %s' % (k, v) for k, v in opts.items() if v])
381 [b'--%s %s' % (k, v) for k, v in pycompat.iteritems(opts) if v]
382 )
383 if repo.filtername is None:
380 if repo.filtername is None:
384 optstr += b'--hidden'
381 optstr += b'--hidden'
385
382
@@ -26,7 +26,6 b' Pygments will try very hard to identify '
26 match (even matches with a low confidence score) will be used.
26 match (even matches with a low confidence score) will be used.
27 """
27 """
28
28
29 from __future__ import absolute_import
30
29
31 from . import highlight
30 from . import highlight
32 from mercurial.hgweb import (
31 from mercurial.hgweb import (
@@ -8,7 +8,6 b''
8 # The original module was split in an interface and an implementation
8 # The original module was split in an interface and an implementation
9 # file to defer pygments loading and speedup extension setup.
9 # file to defer pygments loading and speedup extension setup.
10
10
11 from __future__ import absolute_import
12
11
13 from mercurial import demandimport
12 from mercurial import demandimport
14
13
@@ -190,7 +190,6 b' unexpectedly::'
190
190
191 """
191 """
192
192
193 from __future__ import absolute_import
194
193
195 # chistedit dependencies that are not available everywhere
194 # chistedit dependencies that are not available everywhere
196 try:
195 try:
@@ -200,8 +199,10 b' except ImportError:'
200 fcntl = None
199 fcntl = None
201 termios = None
200 termios = None
202
201
202 import binascii
203 import functools
203 import functools
204 import os
204 import os
205 import pickle
205 import struct
206 import struct
206
207
207 from mercurial.i18n import _
208 from mercurial.i18n import _
@@ -245,7 +246,6 b' from mercurial.utils import ('
245 urlutil,
246 urlutil,
246 )
247 )
247
248
248 pickle = util.pickle
249 cmdtable = {}
249 cmdtable = {}
250 command = registrar.command(cmdtable)
250 command = registrar.command(cmdtable)
251
251
@@ -352,7 +352,7 b' Commands:'
352 return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])
352 return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])
353
353
354
354
355 class histeditstate(object):
355 class histeditstate:
356 def __init__(self, repo):
356 def __init__(self, repo):
357 self.repo = repo
357 self.repo = repo
358 self.actions = None
358 self.actions = None
@@ -455,7 +455,7 b' class histeditstate(object):'
455 rules = []
455 rules = []
456 rulelen = int(lines[index])
456 rulelen = int(lines[index])
457 index += 1
457 index += 1
458 for i in pycompat.xrange(rulelen):
458 for i in range(rulelen):
459 ruleaction = lines[index]
459 ruleaction = lines[index]
460 index += 1
460 index += 1
461 rule = lines[index]
461 rule = lines[index]
@@ -466,7 +466,7 b' class histeditstate(object):'
466 replacements = []
466 replacements = []
467 replacementlen = int(lines[index])
467 replacementlen = int(lines[index])
468 index += 1
468 index += 1
469 for i in pycompat.xrange(replacementlen):
469 for i in range(replacementlen):
470 replacement = lines[index]
470 replacement = lines[index]
471 original = bin(replacement[:40])
471 original = bin(replacement[:40])
472 succ = [
472 succ = [
@@ -491,7 +491,7 b' class histeditstate(object):'
491 return self.repo.vfs.exists(b'histedit-state')
491 return self.repo.vfs.exists(b'histedit-state')
492
492
493
493
494 class histeditaction(object):
494 class histeditaction:
495 def __init__(self, state, node):
495 def __init__(self, state, node):
496 self.state = state
496 self.state = state
497 self.repo = state.repo
497 self.repo = state.repo
@@ -505,7 +505,7 b' class histeditaction(object):'
505 # Check for validation of rule ids and get the rulehash
505 # Check for validation of rule ids and get the rulehash
506 try:
506 try:
507 rev = bin(ruleid)
507 rev = bin(ruleid)
508 except TypeError:
508 except binascii.Error:
509 try:
509 try:
510 _ctx = scmutil.revsingle(state.repo, ruleid)
510 _ctx = scmutil.revsingle(state.repo, ruleid)
511 rulehash = _ctx.hex()
511 rulehash = _ctx.hex()
@@ -553,9 +553,7 b' class histeditaction(object):'
553 summary = cmdutil.rendertemplate(
553 summary = cmdutil.rendertemplate(
554 ctx, ui.config(b'histedit', b'summary-template')
554 ctx, ui.config(b'histedit', b'summary-template')
555 )
555 )
556 # Handle the fact that `''.splitlines() => []`
556 line = b'%s %s %s' % (self.verb, ctx, stringutil.firstline(summary))
557 summary = summary.splitlines()[0] if summary else b''
558 line = b'%s %s %s' % (self.verb, ctx, summary)
559 # trim to 75 columns by default so it's not stupidly wide in my editor
557 # trim to 75 columns by default so it's not stupidly wide in my editor
560 # (the 5 more are left for verb)
558 # (the 5 more are left for verb)
561 maxlen = self.repo.ui.configint(b'histedit', b'linelen')
559 maxlen = self.repo.ui.configint(b'histedit', b'linelen')
@@ -1143,7 +1141,7 b' def screen_size():'
1143 return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' '))
1141 return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' '))
1144
1142
1145
1143
1146 class histeditrule(object):
1144 class histeditrule:
1147 def __init__(self, ui, ctx, pos, action=b'pick'):
1145 def __init__(self, ui, ctx, pos, action=b'pick'):
1148 self.ui = ui
1146 self.ui = ui
1149 self.ctx = ctx
1147 self.ctx = ctx
@@ -1193,7 +1191,7 b' class histeditrule(object):'
1193 # This is split off from the prefix property so that we can
1191 # This is split off from the prefix property so that we can
1194 # separately make the description for 'roll' red (since it
1192 # separately make the description for 'roll' red (since it
1195 # will get discarded).
1193 # will get discarded).
1196 return self.ctx.description().splitlines()[0].strip()
1194 return stringutil.firstline(self.ctx.description())
1197
1195
1198 def checkconflicts(self, other):
1196 def checkconflicts(self, other):
1199 if other.pos > self.pos and other.origpos <= self.origpos:
1197 if other.pos > self.pos and other.origpos <= self.origpos:
@@ -1243,7 +1241,7 b' def _trunc_tail(line, n):'
1243 return line[: n - 2] + b' >'
1241 return line[: n - 2] + b' >'
1244
1242
1245
1243
1246 class _chistedit_state(object):
1244 class _chistedit_state:
1247 def __init__(
1245 def __init__(
1248 self,
1246 self,
1249 repo,
1247 repo,
@@ -1292,7 +1290,7 b' class _chistedit_state(object):'
1292 line = b"bookmark: %s" % b' '.join(bms)
1290 line = b"bookmark: %s" % b' '.join(bms)
1293 win.addstr(3, 1, line[:length])
1291 win.addstr(3, 1, line[:length])
1294
1292
1295 line = b"summary: %s" % (ctx.description().splitlines()[0])
1293 line = b"summary: %s" % stringutil.firstline(ctx.description())
1296 win.addstr(4, 1, line[:length])
1294 win.addstr(4, 1, line[:length])
1297
1295
1298 line = b"files: "
1296 line = b"files: "
@@ -1576,7 +1574,7 b' pgup/K: move patch up, pgdn/J: move patc'
1576
1574
1577 start = min(old_rule_pos, new_rule_pos)
1575 start = min(old_rule_pos, new_rule_pos)
1578 end = max(old_rule_pos, new_rule_pos)
1576 end = max(old_rule_pos, new_rule_pos)
1579 for r in pycompat.xrange(start, end + 1):
1577 for r in range(start, end + 1):
1580 rules[new_rule_pos].checkconflicts(rules[r])
1578 rules[new_rule_pos].checkconflicts(rules[r])
1581 rules[old_rule_pos].checkconflicts(rules[r])
1579 rules[old_rule_pos].checkconflicts(rules[r])
1582
1580
@@ -2102,7 +2100,7 b' def _finishhistedit(ui, repo, state, fm)'
2102
2100
2103 mapping, tmpnodes, created, ntm = processreplacement(state)
2101 mapping, tmpnodes, created, ntm = processreplacement(state)
2104 if mapping:
2102 if mapping:
2105 for prec, succs in pycompat.iteritems(mapping):
2103 for prec, succs in mapping.items():
2106 if not succs:
2104 if not succs:
2107 ui.debug(b'histedit: %s is dropped\n' % short(prec))
2105 ui.debug(b'histedit: %s is dropped\n' % short(prec))
2108 else:
2106 else:
@@ -2140,7 +2138,7 b' def _finishhistedit(ui, repo, state, fm)'
2140 nodechanges = fd(
2138 nodechanges = fd(
2141 {
2139 {
2142 hf(oldn): fl([hf(n) for n in newn], name=b'node')
2140 hf(oldn): fl([hf(n) for n in newn], name=b'node')
2143 for oldn, newn in pycompat.iteritems(mapping)
2141 for oldn, newn in mapping.items()
2144 },
2142 },
2145 key=b"oldnode",
2143 key=b"oldnode",
2146 value=b"newnodes",
2144 value=b"newnodes",
@@ -2322,12 +2320,7 b' def _newhistedit(ui, repo, state, revs, '
2322
2320
2323
2321
2324 def _getsummary(ctx):
2322 def _getsummary(ctx):
2325 # a common pattern is to extract the summary but default to the empty
2323 return stringutil.firstline(ctx.description())
2326 # string
2327 summary = ctx.description() or b''
2328 if summary:
2329 summary = summary.splitlines()[0]
2330 return summary
2331
2324
2332
2325
2333 def bootstrapcontinue(ui, state, opts):
2326 def bootstrapcontinue(ui, state, opts):
@@ -2388,7 +2381,7 b' def ruleeditor(repo, ui, actions, editco'
2388 tsum = summary[len(fword) + 1 :].lstrip()
2381 tsum = summary[len(fword) + 1 :].lstrip()
2389 # safe but slow: reverse iterate over the actions so we
2382 # safe but slow: reverse iterate over the actions so we
2390 # don't clash on two commits having the same summary
2383 # don't clash on two commits having the same summary
2391 for na, l in reversed(list(pycompat.iteritems(newact))):
2384 for na, l in reversed(list(newact.items())):
2392 actx = repo[na.node]
2385 actx = repo[na.node]
2393 asum = _getsummary(actx)
2386 asum = _getsummary(actx)
2394 if asum == tsum:
2387 if asum == tsum:
@@ -2401,7 +2394,7 b' def ruleeditor(repo, ui, actions, editco'
2401
2394
2402 # copy over and flatten the new list
2395 # copy over and flatten the new list
2403 actions = []
2396 actions = []
2404 for na, l in pycompat.iteritems(newact):
2397 for na, l in newact.items():
2405 actions.append(na)
2398 actions.append(na)
2406 actions += l
2399 actions += l
2407
2400
@@ -13,7 +13,6 b' implement them as individual hooks or me'
13 extension as option. The functionality itself is planned to be supported
13 extension as option. The functionality itself is planned to be supported
14 long-term.
14 long-term.
15 """
15 """
16 from __future__ import absolute_import
17 from . import (
16 from . import (
18 changeset_obsoleted,
17 changeset_obsoleted,
19 changeset_published,
18 changeset_published,
@@ -17,7 +17,6 b' Usage:'
17 python:hgext.hooklib.changeset_obsoleted.hook
17 python:hgext.hooklib.changeset_obsoleted.hook
18 """
18 """
19
19
20 from __future__ import absolute_import
21
20
22 import email.errors as emailerrors
21 import email.errors as emailerrors
23 import email.utils as emailutils
22 import email.utils as emailutils
@@ -115,7 +114,7 b' def _report_commit(ui, repo, ctx):'
115 msg['From'] = mail.addressencode(ui, sender, n.charsets, n.test)
114 msg['From'] = mail.addressencode(ui, sender, n.charsets, n.test)
116 msg['To'] = ', '.join(sorted(subs))
115 msg['To'] = ', '.join(sorted(subs))
117
116
118 msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
117 msgtext = msg.as_bytes()
119 if ui.configbool(b'notify', b'test'):
118 if ui.configbool(b'notify', b'test'):
120 ui.write(msgtext)
119 ui.write(msgtext)
121 if not msgtext.endswith(b'\n'):
120 if not msgtext.endswith(b'\n'):
@@ -17,7 +17,6 b' Usage:'
17 python:hgext.hooklib.changeset_published.hook
17 python:hgext.hooklib.changeset_published.hook
18 """
18 """
19
19
20 from __future__ import absolute_import
21
20
22 import email.errors as emailerrors
21 import email.errors as emailerrors
23 import email.utils as emailutils
22 import email.utils as emailutils
@@ -114,7 +113,7 b' def _report_commit(ui, repo, ctx):'
114 msg['From'] = mail.addressencode(ui, sender, n.charsets, n.test)
113 msg['From'] = mail.addressencode(ui, sender, n.charsets, n.test)
115 msg['To'] = ', '.join(sorted(subs))
114 msg['To'] = ', '.join(sorted(subs))
116
115
117 msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
116 msgtext = msg.as_bytes()
118 if ui.configbool(b'notify', b'test'):
117 if ui.configbool(b'notify', b'test'):
119 ui.write(msgtext)
118 ui.write(msgtext)
120 if not msgtext.endswith(b'\n'):
119 if not msgtext.endswith(b'\n'):
@@ -14,7 +14,6 b' Usage:'
14 python:hgext.hooklib.enforce_draft_commits.hook
14 python:hgext.hooklib.enforce_draft_commits.hook
15 """
15 """
16
16
17 from __future__ import absolute_import
18
17
19 from mercurial.i18n import _
18 from mercurial.i18n import _
20 from mercurial import (
19 from mercurial import (
@@ -14,7 +14,6 b' Usage:'
14 python:hgext.hooklib.reject_merge_commits.hook
14 python:hgext.hooklib.reject_merge_commits.hook
15 """
15 """
16
16
17 from __future__ import absolute_import
18
17
19 from mercurial.i18n import _
18 from mercurial.i18n import _
20 from mercurial import (
19 from mercurial import (
@@ -14,7 +14,6 b' Usage:'
14 python:hgext.hooklib.reject_new_heads.hook
14 python:hgext.hooklib.reject_new_heads.hook
15 """
15 """
16
16
17 from __future__ import absolute_import
18
17
19 from mercurial.i18n import _
18 from mercurial.i18n import _
20 from mercurial import (
19 from mercurial import (
@@ -87,11 +87,9 b' delete this code at the end of 2020.'
87 bookmarks = True
87 bookmarks = True
88 """
88 """
89
89
90 from __future__ import absolute_import
91
90
92 import collections
91 import collections
93 import contextlib
92 import contextlib
94 import errno
95 import functools
93 import functools
96 import logging
94 import logging
97 import os
95 import os
@@ -287,7 +285,7 b' def _tryhoist(ui, remotebookmark):'
287 return remotebookmark
285 return remotebookmark
288
286
289
287
290 class bundlestore(object):
288 class bundlestore:
291 def __init__(self, repo):
289 def __init__(self, repo):
292 self._repo = repo
290 self._repo = repo
293 storetype = self._repo.ui.config(b'infinitepush', b'storetype')
291 storetype = self._repo.ui.config(b'infinitepush', b'storetype')
@@ -406,7 +404,7 b' def _checkheads(orig, pushop):'
406
404
407 def wireprotolistkeyspatterns(repo, proto, namespace, patterns):
405 def wireprotolistkeyspatterns(repo, proto, namespace, patterns):
408 patterns = wireprototypes.decodelist(patterns)
406 patterns = wireprototypes.decodelist(patterns)
409 d = pycompat.iteritems(repo.listkeys(encoding.tolocal(namespace), patterns))
407 d = repo.listkeys(encoding.tolocal(namespace), patterns).items()
410 return pushkey.encodekeys(d)
408 return pushkey.encodekeys(d)
411
409
412
410
@@ -420,7 +418,7 b' def localrepolistkeys(orig, self, namesp'
420 if pattern.endswith(b'*'):
418 if pattern.endswith(b'*'):
421 pattern = b're:^' + pattern[:-1] + b'.*'
419 pattern = b're:^' + pattern[:-1] + b'.*'
422 kind, pat, matcher = stringutil.stringmatcher(pattern)
420 kind, pat, matcher = stringutil.stringmatcher(pattern)
423 for bookmark, node in pycompat.iteritems(bookmarks):
421 for bookmark, node in bookmarks.items():
424 if matcher(bookmark):
422 if matcher(bookmark):
425 results[bookmark] = node
423 results[bookmark] = node
426 return results
424 return results
@@ -543,7 +541,7 b' def _generateoutputparts(head, bundlerep'
543 if part.type == b'changegroup':
541 if part.type == b'changegroup':
544 haschangegroup = True
542 haschangegroup = True
545 newpart = bundle2.bundlepart(part.type, data=part.read())
543 newpart = bundle2.bundlepart(part.type, data=part.read())
546 for key, value in pycompat.iteritems(part.params):
544 for key, value in part.params.items():
547 newpart.addparam(key, value)
545 newpart.addparam(key, value)
548 parts.append(newpart)
546 parts.append(newpart)
549
547
@@ -795,7 +793,7 b' def _saveremotebookmarks(repo, newbookma'
795 # saveremotenames expects 20 byte binary nodes for branches
793 # saveremotenames expects 20 byte binary nodes for branches
796 branches[rname].append(bin(hexnode))
794 branches[rname].append(bin(hexnode))
797
795
798 for bookmark, hexnode in pycompat.iteritems(newbookmarks):
796 for bookmark, hexnode in newbookmarks.items():
799 bookmarks[bookmark] = hexnode
797 bookmarks[bookmark] = hexnode
800 remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks)
798 remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks)
801
799
@@ -805,7 +803,7 b' def _savelocalbookmarks(repo, bookmarks)'
805 return
803 return
806 with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
804 with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
807 changes = []
805 changes = []
808 for scratchbook, node in pycompat.iteritems(bookmarks):
806 for scratchbook, node in bookmarks.items():
809 changectx = repo[node]
807 changectx = repo[node]
810 changes.append((scratchbook, changectx.node()))
808 changes.append((scratchbook, changectx.node()))
811 repo._bookmarks.applychanges(repo, tr, changes)
809 repo._bookmarks.applychanges(repo, tr, changes)
@@ -1046,7 +1044,7 b' def storetobundlestore(orig, repo, op, u'
1046 bundle2._processpart(op, part)
1044 bundle2._processpart(op, part)
1047 else:
1045 else:
1048 bundlepart = bundle2.bundlepart(part.type, data=part.read())
1046 bundlepart = bundle2.bundlepart(part.type, data=part.read())
1049 for key, value in pycompat.iteritems(part.params):
1047 for key, value in part.params.items():
1050 bundlepart.addparam(key, value)
1048 bundlepart.addparam(key, value)
1051
1049
1052 # Certain parts require a response
1050 # Certain parts require a response
@@ -1138,7 +1136,7 b' def processparts(orig, repo, op, unbundl'
1138 # differs from previous behavior, we need to put it behind a
1136 # differs from previous behavior, we need to put it behind a
1139 # config flag for incremental rollout.
1137 # config flag for incremental rollout.
1140 bundlepart = bundle2.bundlepart(part.type, data=part.read())
1138 bundlepart = bundle2.bundlepart(part.type, data=part.read())
1141 for key, value in pycompat.iteritems(part.params):
1139 for key, value in part.params.items():
1142 bundlepart.addparam(key, value)
1140 bundlepart.addparam(key, value)
1143
1141
1144 # Certain parts require a response
1142 # Certain parts require a response
@@ -1308,9 +1306,8 b' def bundle2scratchbranch(op, part):'
1308 finally:
1306 finally:
1309 try:
1307 try:
1310 os.unlink(bundlefile)
1308 os.unlink(bundlefile)
1311 except OSError as e:
1309 except FileNotFoundError:
1312 if e.errno != errno.ENOENT:
1310 pass
1313 raise
1314
1311
1315 return 1
1312 return 1
1316
1313
@@ -1324,9 +1321,7 b' def _maybeaddpushbackpart(op, bookmark, '
1324 b'new': newnode,
1321 b'new': newnode,
1325 b'old': oldnode,
1322 b'old': oldnode,
1326 }
1323 }
1327 op.reply.newpart(
1324 op.reply.newpart(b'pushkey', mandatoryparams=params.items())
1328 b'pushkey', mandatoryparams=pycompat.iteritems(params)
1329 )
1330
1325
1331
1326
1332 def bundle2pushkey(orig, op, part):
1327 def bundle2pushkey(orig, op, part):
@@ -3,7 +3,6 b''
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
7
6
8 from mercurial.i18n import _
7 from mercurial.i18n import _
9 from mercurial.node import hex
8 from mercurial.node import hex
@@ -13,7 +12,6 b' from mercurial import ('
13 changegroup,
12 changegroup,
14 error,
13 error,
15 extensions,
14 extensions,
16 pycompat,
17 revsetlang,
15 revsetlang,
18 util,
16 util,
19 )
17 )
@@ -68,7 +66,7 b' def getscratchbranchparts(repo, peer, ou'
68 parts.append(
66 parts.append(
69 bundle2.bundlepart(
67 bundle2.bundlepart(
70 scratchbranchparttype.upper(),
68 scratchbranchparttype.upper(),
71 advisoryparams=pycompat.iteritems(params),
69 advisoryparams=params.items(),
72 data=cg,
70 data=cg,
73 )
71 )
74 )
72 )
@@ -103,7 +101,7 b' def _handlelfs(repo, missing):'
103 return
101 return
104
102
105
103
106 class copiedpart(object):
104 class copiedpart:
107 """a copy of unbundlepart content that can be consumed later"""
105 """a copy of unbundlepart content that can be consumed later"""
108
106
109 def __init__(self, part):
107 def __init__(self, part):
@@ -3,7 +3,6 b''
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
7
6
8 import os
7 import os
9
8
@@ -11,7 +11,6 b''
11 indexpath = PATH
11 indexpath = PATH
12 """
12 """
13
13
14 from __future__ import absolute_import
15
14
16 import os
15 import os
17
16
@@ -5,10 +5,8 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10
9 class indexapi:
11 class indexapi(object):
12 """Class that manages access to infinitepush index.
10 """Class that manages access to infinitepush index.
13
11
14 This class is a context manager and all write operations (like
12 This class is a context manager and all write operations (like
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import logging
9 import logging
11 import os
10 import os
@@ -14,8 +13,6 b' import time'
14 import warnings
13 import warnings
15 import mysql.connector
14 import mysql.connector
16
15
17 from mercurial import pycompat
18
19 from . import indexapi
16 from . import indexapi
20
17
21
18
@@ -180,7 +177,7 b' class sqlindexapi(indexapi.indexapi):'
180 self.sqlconnect()
177 self.sqlconnect()
181 args = []
178 args = []
182 values = []
179 values = []
183 for bookmark, node in pycompat.iteritems(bookmarks):
180 for bookmark, node in bookmarks.items():
184 args.append(b'(%s, %s, %s)')
181 args.append(b'(%s, %s, %s)')
185 values.extend((bookmark, node, self.reponame))
182 values.extend((bookmark, node, self.reponame))
186 args = b','.join(args)
183 args = b','.join(args)
@@ -3,7 +3,6 b''
3
3
4 # based on bundleheads extension by Gregory Szorc <gps@mozilla.com>
4 # based on bundleheads extension by Gregory Szorc <gps@mozilla.com>
5
5
6 from __future__ import absolute_import
7
6
8 import abc
7 import abc
9 import os
8 import os
@@ -26,7 +25,7 b' class BundleReadException(Exception):'
26 pass
25 pass
27
26
28
27
29 class abstractbundlestore(object): # pytype: disable=ignored-metaclass
28 class abstractbundlestore: # pytype: disable=ignored-metaclass
30 """Defines the interface for bundle stores.
29 """Defines the interface for bundle stores.
31
30
32 A bundle store is an entity that stores raw bundle data. It is a simple
31 A bundle store is an entity that stores raw bundle data. It is a simple
@@ -57,7 +56,7 b' class abstractbundlestore(object): # py'
57 """
56 """
58
57
59
58
60 class filebundlestore(object):
59 class filebundlestore:
61 """bundle store in filesystem
60 """bundle store in filesystem
62
61
63 meant for storing bundles somewhere on disk and on network filesystems
62 meant for storing bundles somewhere on disk and on network filesystems
@@ -11,10 +11,8 b' bookmarks were previously located.'
11
11
12 """
12 """
13
13
14 from __future__ import absolute_import
15
14
16 import collections
15 import collections
17 import errno
18 import os
16 import os
19 import weakref
17 import weakref
20
18
@@ -129,7 +127,7 b' def recordbookmarks(orig, store, fp):'
129 repo = store._repo
127 repo = store._repo
130 if util.safehasattr(repo, 'journal'):
128 if util.safehasattr(repo, 'journal'):
131 oldmarks = bookmarks.bmstore(repo)
129 oldmarks = bookmarks.bmstore(repo)
132 for mark, value in pycompat.iteritems(store):
130 for mark, value in store.items():
133 oldvalue = oldmarks.get(mark, repo.nullid)
131 oldvalue = oldmarks.get(mark, repo.nullid)
134 if value != oldvalue:
132 if value != oldvalue:
135 repo.journal.record(bookmarktype, mark, oldvalue, value)
133 repo.journal.record(bookmarktype, mark, oldvalue, value)
@@ -141,9 +139,7 b' def _readsharedfeatures(repo):'
141 """A set of shared features for this repository"""
139 """A set of shared features for this repository"""
142 try:
140 try:
143 return set(repo.vfs.read(b'shared').splitlines())
141 return set(repo.vfs.read(b'shared').splitlines())
144 except IOError as inst:
142 except FileNotFoundError:
145 if inst.errno != errno.ENOENT:
146 raise
147 return set()
143 return set()
148
144
149
145
@@ -167,7 +163,7 b' def _mergeentriesiter(*iterables, **kwar'
167 pass
163 pass
168
164
169 while iterable_map:
165 while iterable_map:
170 value, key, it = order(pycompat.itervalues(iterable_map))
166 value, key, it = order(iterable_map.values())
171 yield value
167 yield value
172 try:
168 try:
173 iterable_map[key][0] = next(it)
169 iterable_map[key][0] = next(it)
@@ -283,7 +279,7 b' class journalentry('
283 __str__ = encoding.strmethod(__bytes__)
279 __str__ = encoding.strmethod(__bytes__)
284
280
285
281
286 class journalstorage(object):
282 class journalstorage:
287 """Storage for journal entries
283 """Storage for journal entries
288
284
289 Entries are divided over two files; one with entries that pertain to the
285 Entries are divided over two files; one with entries that pertain to the
@@ -83,8 +83,6 b" like CVS' $Log$, are not supported. A ke"
83 '''
83 '''
84
84
85
85
86 from __future__ import absolute_import
87
88 import os
86 import os
89 import re
87 import re
90 import weakref
88 import weakref
@@ -237,7 +235,7 b' def _preselect(wstatus, changed):'
237 return modified, added
235 return modified, added
238
236
239
237
240 class kwtemplater(object):
238 class kwtemplater:
241 """
239 """
242 Sets up keyword templates, corresponding keyword regex, and
240 Sets up keyword templates, corresponding keyword regex, and
243 provides keyword substitution functions.
241 provides keyword substitution functions.
@@ -515,7 +513,7 b' def demo(ui, repo, *args, **opts):'
515 kwmaps = _defaultkwmaps(ui)
513 kwmaps = _defaultkwmaps(ui)
516 if uikwmaps:
514 if uikwmaps:
517 ui.status(_(b'\tdisabling current template maps\n'))
515 ui.status(_(b'\tdisabling current template maps\n'))
518 for k, v in pycompat.iteritems(kwmaps):
516 for k, v in kwmaps.items():
519 ui.setconfig(b'keywordmaps', k, v, b'keyword')
517 ui.setconfig(b'keywordmaps', k, v, b'keyword')
520 else:
518 else:
521 ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
519 ui.status(_(b'\n\tconfiguration using current keyword template maps\n'))
@@ -529,7 +527,7 b' def demo(ui, repo, *args, **opts):'
529 ui.writenoi18n(b'[extensions]\nkeyword =\n')
527 ui.writenoi18n(b'[extensions]\nkeyword =\n')
530 demoitems(b'keyword', ui.configitems(b'keyword'))
528 demoitems(b'keyword', ui.configitems(b'keyword'))
531 demoitems(b'keywordset', ui.configitems(b'keywordset'))
529 demoitems(b'keywordset', ui.configitems(b'keywordset'))
532 demoitems(b'keywordmaps', pycompat.iteritems(kwmaps))
530 demoitems(b'keywordmaps', kwmaps.items())
533 keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
531 keywords = b'$' + b'$\n$'.join(sorted(kwmaps.keys())) + b'$\n'
534 repo.wvfs.write(fn, keywords)
532 repo.wvfs.write(fn, keywords)
535 repo[None].add([fn])
533 repo[None].add([fn])
@@ -104,7 +104,6 b' largefile. To add the first largefile to'
104 explicitly do so with the --large flag passed to the :hg:`add`
104 explicitly do so with the --large flag passed to the :hg:`add`
105 command.
105 command.
106 '''
106 '''
107 from __future__ import absolute_import
108
107
109 from mercurial import (
108 from mercurial import (
110 cmdutil,
109 cmdutil,
@@ -7,7 +7,6 b''
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''base class for store implementations and store-related utility code'''
9 '''base class for store implementations and store-related utility code'''
10 from __future__ import absolute_import
11
10
12 from mercurial.i18n import _
11 from mercurial.i18n import _
13
12
@@ -42,7 +41,7 b' class StoreError(Exception):'
42 return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail)
41 return b"%s: %s" % (urlutil.hidepassword(self.url), self.detail)
43
42
44
43
45 class basestore(object):
44 class basestore:
46 def __init__(self, ui, repo, url):
45 def __init__(self, ui, repo, url):
47 self.ui = ui
46 self.ui = ui
48 self.repo = repo
47 self.repo = repo
@@ -7,9 +7,8 b''
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 from __future__ import absolute_import
11
10
12 import errno
11 import binascii
13 import os
12 import os
14 import shutil
13 import shutil
15
14
@@ -385,7 +384,7 b' def _converttags(ui, revmap, data):'
385 continue
384 continue
386 try:
385 try:
387 newid = bin(id)
386 newid = bin(id)
388 except TypeError:
387 except binascii.Error:
389 ui.warn(_(b'skipping incorrectly formatted id %s\n') % id)
388 ui.warn(_(b'skipping incorrectly formatted id %s\n') % id)
390 continue
389 continue
391 try:
390 try:
@@ -474,10 +473,8 b' def cachelfiles(ui, repo, node, filelist'
474 for lfile in lfiles:
473 for lfile in lfiles:
475 try:
474 try:
476 expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
475 expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
477 except IOError as err:
476 except FileNotFoundError:
478 if err.errno == errno.ENOENT:
477 continue # node must be None and standin wasn't found in wctx
479 continue # node must be None and standin wasn't found in wctx
480 raise
481 if not lfutil.findfile(repo, expectedhash):
478 if not lfutil.findfile(repo, expectedhash):
482 toget.append((lfile, expectedhash))
479 toget.append((lfile, expectedhash))
483
480
@@ -7,7 +7,6 b''
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''largefiles utility code: must not import other modules in this package.'''
9 '''largefiles utility code: must not import other modules in this package.'''
10 from __future__ import absolute_import
11
10
12 import contextlib
11 import contextlib
13 import copy
12 import copy
@@ -757,7 +756,7 b' def updatestandinsbymatch(repo, match):'
757 return match
756 return match
758
757
759
758
760 class automatedcommithook(object):
759 class automatedcommithook:
761 """Stateful hook to update standins at the 1st commit of resuming
760 """Stateful hook to update standins at the 1st commit of resuming
762
761
763 For efficiency, updating standins in the working directory should
762 For efficiency, updating standins in the working directory should
@@ -7,7 +7,6 b''
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''store class for local filesystem'''
9 '''store class for local filesystem'''
10 from __future__ import absolute_import
11
10
12 from mercurial.i18n import _
11 from mercurial.i18n import _
13 from mercurial.pycompat import open
12 from mercurial.pycompat import open
@@ -7,7 +7,6 b''
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 from __future__ import absolute_import
11
10
12 import copy
11 import copy
13 import os
12 import os
@@ -493,7 +492,7 b' def overridedebugstate(orig, ui, repo, *'
493 large = opts.pop('large', False)
492 large = opts.pop('large', False)
494 if large:
493 if large:
495
494
496 class fakerepo(object):
495 class fakerepo:
497 dirstate = lfutil.openlfdirstate(ui, repo)
496 dirstate = lfutil.openlfdirstate(ui, repo)
498
497
499 orig(ui, fakerepo, *pats, **opts)
498 orig(ui, fakerepo, *pats, **opts)
@@ -714,7 +713,7 b' def copiespathcopies(orig, ctx1, ctx2, m'
714 copies = orig(ctx1, ctx2, match=match)
713 copies = orig(ctx1, ctx2, match=match)
715 updated = {}
714 updated = {}
716
715
717 for k, v in pycompat.iteritems(copies):
716 for k, v in copies.items():
718 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
717 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
719
718
720 return updated
719 return updated
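
pycompat.iteritems was a shim that returned dict.iteritems() on Python 2 and dict.items() on Python 3; with only Python 3 left, call sites use the dict view directly, as above. A standalone sketch of the rewritten loop shape (toy data, not largefiles internals):

copies = {b'src/a.txt': b'dst/a.txt', b'src/b.txt': b'dst/b.txt'}

updated = {}
for k, v in copies.items():   # previously: pycompat.iteritems(copies)
    updated[k] = v
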
@@ -2,7 +2,6 b''
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5 from __future__ import absolute_import
6
5
7 import os
6 import os
8
7
@@ -5,13 +5,11 b''
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6
6
7 '''remote largefile store; the base class for wirestore'''
7 '''remote largefile store; the base class for wirestore'''
8 from __future__ import absolute_import
9
8
10 from mercurial.i18n import _
9 from mercurial.i18n import _
11
10
12 from mercurial import (
11 from mercurial import (
13 error,
12 error,
14 pycompat,
15 util,
13 util,
16 )
14 )
17
15
@@ -53,9 +51,8 b' class remotestore(basestore.basestore):'
53 def exists(self, hashes):
51 def exists(self, hashes):
54 return {
52 return {
55 h: s == 0
53 h: s == 0
56 for (h, s) in pycompat.iteritems(
54 for (h, s) in self._stat(hashes).items()
57 self._stat(hashes)
55 # dict-from-generator
58 ) # dict-from-generator
59 }
56 }
60
57
61 def sendfile(self, filename, hash):
58 def sendfile(self, filename, hash):
@@ -7,7 +7,6 b''
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''setup for largefiles repositories: reposetup'''
9 '''setup for largefiles repositories: reposetup'''
10 from __future__ import absolute_import
11
10
12 import copy
11 import copy
13
12
@@ -1,7 +1,6 b''
1 # This software may be used and distributed according to the terms of the
1 # This software may be used and distributed according to the terms of the
2 # GNU General Public License version 2 or any later version.
2 # GNU General Public License version 2 or any later version.
3
3
4 from __future__ import absolute_import
5
4
6 import re
5 import re
7
6
@@ -4,7 +4,6 b''
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''largefile store working over Mercurial's wire protocol'''
6 '''largefile store working over Mercurial's wire protocol'''
7 from __future__ import absolute_import
8
7
9 from . import (
8 from . import (
10 lfutil,
9 lfutil,
@@ -120,7 +120,6 b' Configs::'
120 usercache = /path/to/global/cache
120 usercache = /path/to/global/cache
121 """
121 """
122
122
123 from __future__ import absolute_import
124
123
125 import sys
124 import sys
126
125
@@ -400,7 +399,7 b' def lfsfiles(context, mapping):'
400 def pointer(v):
399 def pointer(v):
401 # In the file spec, version is first and the other keys are sorted.
400 # In the file spec, version is first and the other keys are sorted.
402 sortkeyfunc = lambda x: (x[0] != b'version', x)
401 sortkeyfunc = lambda x: (x[0] != b'version', x)
403 items = sorted(pycompat.iteritems(pointers[v]), key=sortkeyfunc)
402 items = sorted(pointers[v].items(), key=sortkeyfunc)
404 return util.sortdict(items)
403 return util.sortdict(items)
405
404
406 makemap = lambda v: {
405 makemap = lambda v: {
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import contextlib
9 import contextlib
11 import errno
10 import errno
@@ -109,7 +108,7 b' class lfsuploadfile(httpconnectionmod.ht'
109 return None # progress is handled by the worker client
108 return None # progress is handled by the worker client
110
109
111
110
112 class local(object):
111 class local:
113 """Local blobstore for large file contents.
112 """Local blobstore for large file contents.
114
113
115 This blobstore is used both as a cache and as a staging area for large blobs
114 This blobstore is used both as a cache and as a staging area for large blobs
@@ -274,7 +273,7 b' def _urlerrorreason(urlerror):'
274 except (AttributeError, IndexError):
273 except (AttributeError, IndexError):
275 # it might be anything, for example a string
274 # it might be anything, for example a string
276 reason = inst.reason
275 reason = inst.reason
277 if isinstance(reason, pycompat.unicode):
276 if isinstance(reason, str):
278 # SSLError of Python 2.7.9 contains a unicode
277 # SSLError of Python 2.7.9 contains a unicode
279 reason = encoding.unitolocal(reason)
278 reason = encoding.unitolocal(reason)
280 return reason
279 return reason
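
pycompat.unicode aliased unicode on Python 2 and str on Python 3, so type checks can now name str directly. A hedged sketch of the same check; the utf-8 encode is only a stand-in for encoding.unitolocal:

def reason_to_bytes(reason):
    # ssl/url layers hand back text; Mercurial wants local bytes.
    if isinstance(reason, str):            # previously pycompat.unicode
        return reason.encode('utf-8')      # stand-in for encoding.unitolocal
    return reason

reason_to_bytes('timed out')    # -> b'timed out'
reason_to_bytes(b'timed out')   # -> returned unchanged
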
@@ -307,7 +306,7 b' class lfsauthhandler(util.urlreq.basehan'
307 return None
306 return None
308
307
309
308
310 class _gitlfsremote(object):
309 class _gitlfsremote:
311 def __init__(self, repo, url):
310 def __init__(self, repo, url):
312 ui = repo.ui
311 ui = repo.ui
313 self.ui = ui
312 self.ui = ui
@@ -407,7 +406,7 b' class _gitlfsremote(object):'
407 )
406 )
408
407
409 def encodestr(x):
408 def encodestr(x):
410 if isinstance(x, pycompat.unicode):
409 if isinstance(x, str):
411 return x.encode('utf-8')
410 return x.encode('utf-8')
412 return x
411 return x
413
412
@@ -643,7 +642,7 b' class _gitlfsremote(object):'
643 getattr(h, "close_all", lambda: None)()
642 getattr(h, "close_all", lambda: None)()
644
643
645
644
646 class _dummyremote(object):
645 class _dummyremote:
647 """Dummy store storing blobs to temp directory."""
646 """Dummy store storing blobs to temp directory."""
648
647
649 def __init__(self, repo, url):
648 def __init__(self, repo, url):
@@ -662,7 +661,7 b' class _dummyremote(object):'
662 tostore.download(p.oid(), fp, None)
661 tostore.download(p.oid(), fp, None)
663
662
664
663
665 class _nullremote(object):
664 class _nullremote:
666 """Null store storing blobs to /dev/null."""
665 """Null store storing blobs to /dev/null."""
667
666
668 def __init__(self, repo, url):
667 def __init__(self, repo, url):
@@ -675,7 +674,7 b' class _nullremote(object):'
675 pass
674 pass
676
675
677
676
678 class _promptremote(object):
677 class _promptremote:
679 """Prompt user to set lfs.url when accessed."""
678 """Prompt user to set lfs.url when accessed."""
680
679
681 def __init__(self, repo, url):
680 def __init__(self, repo, url):
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import re
9 import re
11
10
@@ -41,7 +40,7 b' class gitlfspointer(dict):'
41
40
42 def serialize(self):
41 def serialize(self):
43 sortkeyfunc = lambda x: (x[0] != b'version', x)
42 sortkeyfunc = lambda x: (x[0] != b'version', x)
44 items = sorted(pycompat.iteritems(self.validate()), key=sortkeyfunc)
43 items = sorted(self.validate().items(), key=sortkeyfunc)
45 return b''.join(b'%s %s\n' % (k, v) for k, v in items)
44 return b''.join(b'%s %s\n' % (k, v) for k, v in items)
46
45
47 def oid(self):
46 def oid(self):
@@ -63,7 +62,7 b' class gitlfspointer(dict):'
63 def validate(self):
62 def validate(self):
64 """raise InvalidPointer on error. return self if there is no error"""
63 """raise InvalidPointer on error. return self if there is no error"""
65 requiredcount = 0
64 requiredcount = 0
66 for k, v in pycompat.iteritems(self):
65 for k, v in self.items():
67 if k in self._requiredre:
66 if k in self._requiredre:
68 if not self._requiredre[k].match(v):
67 if not self._requiredre[k].match(v):
69 raise InvalidPointer(
68 raise InvalidPointer(
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import datetime
9 import datetime
11 import errno
10 import errno
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import hashlib
9 import hashlib
11
10
@@ -25,7 +24,6 b' from mercurial import ('
25 exchange,
24 exchange,
26 exthelper,
25 exthelper,
27 localrepo,
26 localrepo,
28 pycompat,
29 revlog,
27 revlog,
30 scmutil,
28 scmutil,
31 util,
29 util,
@@ -143,7 +141,7 b' def writetostore(self, text):'
143
141
144 # translate hg filelog metadata to lfs metadata with "x-hg-" prefix
142 # translate hg filelog metadata to lfs metadata with "x-hg-" prefix
145 if hgmeta is not None:
143 if hgmeta is not None:
146 for k, v in pycompat.iteritems(hgmeta):
144 for k, v in hgmeta.items():
147 metadata[b'x-hg-%s' % k] = v
145 metadata[b'x-hg-%s' % k] = v
148
146
149 rawtext = metadata.serialize()
147 rawtext = metadata.serialize()
@@ -32,7 +32,6 b' not ensure that they exit cleanly.'
32
32
33 """
33 """
34
34
35 from __future__ import absolute_import
36
35
37 import os
36 import os
38
37
@@ -45,7 +44,7 b' from mercurial.utils import procutil'
45 testedwith = b'ships-with-hg-core'
44 testedwith = b'ships-with-hg-core'
46
45
47
46
48 class processlogger(object):
47 class processlogger:
49 """Map log events to external commands
48 """Map log events to external commands
50
49
51 Arguments are passed on as environment variables.
50 Arguments are passed on as environment variables.
@@ -62,9 +62,7 b' This extension used to provide a strip c'
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from __future__ import absolute_import, print_function
65
66
67 import errno
68 import os
66 import os
69 import re
67 import re
70 import shutil
68 import shutil
@@ -151,7 +149,7 b' try:'
151 except KeyError:
149 except KeyError:
152 # note: load is lazy so we could avoid the try-except,
150 # note: load is lazy so we could avoid the try-except,
153 # but I (marmoute) prefer this explicit code.
151 # but I (marmoute) prefer this explicit code.
154 class dummyui(object):
152 class dummyui:
155 def debug(self, msg):
153 def debug(self, msg):
156 pass
154 pass
157
155
@@ -184,7 +182,7 b' def checksubstate(repo, baserev=None):'
184 normname = util.normpath
182 normname = util.normpath
185
183
186
184
187 class statusentry(object):
185 class statusentry:
188 def __init__(self, node, name):
186 def __init__(self, node, name):
189 self.node, self.name = node, name
187 self.node, self.name = node, name
190
188
@@ -294,7 +292,7 b' def insertplainheader(lines, header, val'
294 return lines
292 return lines
295
293
296
294
297 class patchheader(object):
295 class patchheader:
298 def __init__(self, pf, plainmode=False):
296 def __init__(self, pf, plainmode=False):
299 def eatdiff(lines):
297 def eatdiff(lines):
300 while lines:
298 while lines:
@@ -462,7 +460,7 b' class patchheader(object):'
462 the field and a blank line."""
460 the field and a blank line."""
463 if self.message:
461 if self.message:
464 subj = b'subject: ' + self.message[0].lower()
462 subj = b'subject: ' + self.message[0].lower()
465 for i in pycompat.xrange(len(self.comments)):
463 for i in range(len(self.comments)):
466 if subj == self.comments[i].lower():
464 if subj == self.comments[i].lower():
467 del self.comments[i]
465 del self.comments[i]
468 self.message = self.message[2:]
466 self.message = self.message[2:]
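
pycompat.xrange mapped to xrange on Python 2 and range on Python 3; the loops keep their exact shape and simply spell the lazy built-in directly. A sketch of the same index loop with toy data:

comments = [b'subject: fix foo', b'', b'a longer description']
subj = b'subject: fix foo'

for i in range(len(comments)):        # previously pycompat.xrange(...)
    if comments[i].lower() == subj:
        del comments[i]
        break
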
@@ -496,7 +494,7 b' class AbortNoCleanup(error.Abort):'
496 pass
494 pass
497
495
498
496
499 class queue(object):
497 class queue:
500 def __init__(self, ui, baseui, path, patchdir=None):
498 def __init__(self, ui, baseui, path, patchdir=None):
501 self.basepath = path
499 self.basepath = path
502 try:
500 try:
@@ -552,19 +550,15 b' class queue(object):'
552 try:
550 try:
553 lines = self.opener.read(self.statuspath).splitlines()
551 lines = self.opener.read(self.statuspath).splitlines()
554 return list(parselines(lines))
552 return list(parselines(lines))
555 except IOError as e:
553 except FileNotFoundError:
556 if e.errno == errno.ENOENT:
554 return []
557 return []
558 raise
559
555
560 @util.propertycache
556 @util.propertycache
561 def fullseries(self):
557 def fullseries(self):
562 try:
558 try:
563 return self.opener.read(self.seriespath).splitlines()
559 return self.opener.read(self.seriespath).splitlines()
564 except IOError as e:
560 except FileNotFoundError:
565 if e.errno == errno.ENOENT:
561 return []
566 return []
567 raise
568
562
569 @util.propertycache
563 @util.propertycache
570 def series(self):
564 def series(self):
@@ -692,9 +686,7 b' class queue(object):'
692 self.activeguards = []
686 self.activeguards = []
693 try:
687 try:
694 guards = self.opener.read(self.guardspath).split()
688 guards = self.opener.read(self.guardspath).split()
695 except IOError as err:
689 except FileNotFoundError:
696 if err.errno != errno.ENOENT:
697 raise
698 guards = []
690 guards = []
699 for i, guard in enumerate(guards):
691 for i, guard in enumerate(guards):
700 bad = self.checkguard(guard)
692 bad = self.checkguard(guard)
@@ -1141,9 +1133,8 b' class queue(object):'
1141 for p in patches:
1133 for p in patches:
1142 try:
1134 try:
1143 os.unlink(self.join(p))
1135 os.unlink(self.join(p))
1144 except OSError as inst:
1136 except FileNotFoundError:
1145 if inst.errno != errno.ENOENT:
1137 pass
1146 raise
1147
1138
1148 qfinished = []
1139 qfinished = []
1149 if numrevs:
1140 if numrevs:
@@ -2025,7 +2016,7 b' class queue(object):'
2025 # we can't copy a file created by the patch itself
2016 # we can't copy a file created by the patch itself
2026 if dst in copies:
2017 if dst in copies:
2027 del copies[dst]
2018 del copies[dst]
2028 for src, dsts in pycompat.iteritems(copies):
2019 for src, dsts in copies.items():
2029 for dst in dsts:
2020 for dst in dsts:
2030 repo.dirstate.copy(src, dst)
2021 repo.dirstate.copy(src, dst)
2031 else:
2022 else:
@@ -2041,7 +2032,7 b' class queue(object):'
2041 # if the patch excludes a modified file, mark that
2032 # if the patch excludes a modified file, mark that
2042 # file with mtime=0 so status can see it.
2033 # file with mtime=0 so status can see it.
2043 mm = []
2034 mm = []
2044 for i in pycompat.xrange(len(m) - 1, -1, -1):
2035 for i in range(len(m) - 1, -1, -1):
2045 if not match1(m[i]):
2036 if not match1(m[i]):
2046 mm.append(m[i])
2037 mm.append(m[i])
2047 del m[i]
2038 del m[i]
@@ -2152,8 +2143,8 b' class queue(object):'
2152 raise error.Abort(_(b"patch queue directory already exists"))
2143 raise error.Abort(_(b"patch queue directory already exists"))
2153 try:
2144 try:
2154 os.mkdir(self.path)
2145 os.mkdir(self.path)
2155 except OSError as inst:
2146 except FileExistsError:
2156 if inst.errno != errno.EEXIST or not create:
2147 if not create:
2157 raise
2148 raise
2158 if create:
2149 if create:
2159 return self.qrepo(create=True)
2150 return self.qrepo(create=True)
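
Here the OSError guard on errno.EEXIST becomes except FileExistsError, and only the create flag decides whether the error is swallowed; any other OSError still propagates as before. A small sketch of the same shape (make_patch_dir is an invented name):

import os

def make_patch_dir(path, create):
    try:
        os.mkdir(path)
    except FileExistsError:
        # Old form: except OSError as inst, re-raise unless
        # inst.errno == errno.EEXIST and create is set.
        if not create:
            raise
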
@@ -2166,7 +2157,7 b' class queue(object):'
2166 else:
2157 else:
2167 start = self.series.index(patch) + 1
2158 start = self.series.index(patch) + 1
2168 unapplied = []
2159 unapplied = []
2169 for i in pycompat.xrange(start, len(self.series)):
2160 for i in range(start, len(self.series)):
2170 pushable, reason = self.pushable(i)
2161 pushable, reason = self.pushable(i)
2171 if pushable:
2162 if pushable:
2172 unapplied.append((i, self.series[i]))
2163 unapplied.append((i, self.series[i]))
@@ -2211,7 +2202,7 b' class queue(object):'
2211 if not missing:
2202 if not missing:
2212 if self.ui.verbose:
2203 if self.ui.verbose:
2213 idxwidth = len(b"%d" % (start + length - 1))
2204 idxwidth = len(b"%d" % (start + length - 1))
2214 for i in pycompat.xrange(start, start + length):
2205 for i in range(start, start + length):
2215 patch = self.series[i]
2206 patch = self.series[i]
2216 if patch in applied:
2207 if patch in applied:
2217 char, state = b'A', b'applied'
2208 char, state = b'A', b'applied'
@@ -2372,7 +2363,7 b' class queue(object):'
2372 def nextpatch(start):
2363 def nextpatch(start):
2373 if all_patches or start >= len(self.series):
2364 if all_patches or start >= len(self.series):
2374 return start
2365 return start
2375 for i in pycompat.xrange(start, len(self.series)):
2366 for i in range(start, len(self.series)):
2376 p, reason = self.pushable(i)
2367 p, reason = self.pushable(i)
2377 if p:
2368 if p:
2378 return i
2369 return i
@@ -3390,7 +3381,7 b' def guard(ui, repo, *args, **opts):'
3390 raise error.Abort(
3381 raise error.Abort(
3391 _(b'cannot mix -l/--list with options or arguments')
3382 _(b'cannot mix -l/--list with options or arguments')
3392 )
3383 )
3393 for i in pycompat.xrange(len(q.series)):
3384 for i in range(len(q.series)):
3394 status(i)
3385 status(i)
3395 return
3386 return
3396 if not args or args[0][0:1] in b'-+':
3387 if not args or args[0][0:1] in b'-+':
@@ -3768,18 +3759,14 b' def select(ui, repo, *args, **opts):'
3768 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3759 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3769 if args or opts.get(b'none'):
3760 if args or opts.get(b'none'):
3770 old_unapplied = q.unapplied(repo)
3761 old_unapplied = q.unapplied(repo)
3771 old_guarded = [
3762 old_guarded = [i for i in range(len(q.applied)) if not pushable(i)]
3772 i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
3773 ]
3774 q.setactive(args)
3763 q.setactive(args)
3775 q.savedirty()
3764 q.savedirty()
3776 if not args:
3765 if not args:
3777 ui.status(_(b'guards deactivated\n'))
3766 ui.status(_(b'guards deactivated\n'))
3778 if not opts.get(b'pop') and not opts.get(b'reapply'):
3767 if not opts.get(b'pop') and not opts.get(b'reapply'):
3779 unapplied = q.unapplied(repo)
3768 unapplied = q.unapplied(repo)
3780 guarded = [
3769 guarded = [i for i in range(len(q.applied)) if not pushable(i)]
3781 i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
3782 ]
3783 if len(unapplied) != len(old_unapplied):
3770 if len(unapplied) != len(old_unapplied):
3784 ui.status(
3771 ui.status(
3785 _(
3772 _(
@@ -3826,7 +3813,7 b' def select(ui, repo, *args, **opts):'
3826 reapply = opts.get(b'reapply') and q.applied and q.applied[-1].name
3813 reapply = opts.get(b'reapply') and q.applied and q.applied[-1].name
3827 popped = False
3814 popped = False
3828 if opts.get(b'pop') or opts.get(b'reapply'):
3815 if opts.get(b'pop') or opts.get(b'reapply'):
3829 for i in pycompat.xrange(len(q.applied)):
3816 for i in range(len(q.applied)):
3830 if not pushable(i):
3817 if not pushable(i):
3831 ui.status(_(b'popping guarded patches\n'))
3818 ui.status(_(b'popping guarded patches\n'))
3832 popped = True
3819 popped = True
@@ -4288,7 +4275,7 b' def extsetup(ui):'
4288 entry[1].extend(mqopt)
4275 entry[1].extend(mqopt)
4289
4276
4290 def dotable(cmdtable):
4277 def dotable(cmdtable):
4291 for cmd, entry in pycompat.iteritems(cmdtable):
4278 for cmd, entry in cmdtable.items():
4292 cmd = cmdutil.parsealiases(cmd)[0]
4279 cmd = cmdutil.parsealiases(cmd)[0]
4293 func = entry[0]
4280 func = entry[0]
4294 if func.norepo:
4281 if func.norepo:
@@ -6,7 +6,6 b''
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 '''create clones which fetch history data for subset of files (EXPERIMENTAL)'''
7 '''create clones which fetch history data for subset of files (EXPERIMENTAL)'''
8
8
9 from __future__ import absolute_import
10
9
11 from mercurial import (
10 from mercurial import (
12 localrepo,
11 localrepo,
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import errno
9 import errno
11 import struct
10 import struct
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import itertools
8 import itertools
10 import os
9 import os
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from mercurial.i18n import _
9 from mercurial.i18n import _
11 from mercurial import error
10 from mercurial import error
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from mercurial import wireprototypes
9 from mercurial import wireprototypes
11
10
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from mercurial import (
9 from mercurial import (
11 registrar,
10 registrar,
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from mercurial import (
9 from mercurial import (
11 bundle2,
10 bundle2,
@@ -154,7 +154,6 b' web.baseurl'
154 references. See also ``notify.strip``.
154 references. See also ``notify.strip``.
155
155
156 '''
156 '''
157 from __future__ import absolute_import
158
157
159 import email.errors as emailerrors
158 import email.errors as emailerrors
160 import email.utils as emailutils
159 import email.utils as emailutils
@@ -315,7 +314,7 b' deftemplates = {'
315 }
314 }
316
315
317
316
318 class notifier(object):
317 class notifier:
319 '''email notification class.'''
318 '''email notification class.'''
320
319
321 def __init__(self, ui, repo, hooktype):
320 def __init__(self, ui, repo, hooktype):
@@ -466,7 +465,7 b' class notifier(object):'
466 # create fresh mime message from scratch
465 # create fresh mime message from scratch
467 # (multipart templates must take care of this themselves)
466 # (multipart templates must take care of this themselves)
468 headers = msg.items()
467 headers = msg.items()
469 payload = msg.get_payload(decode=pycompat.ispy3)
468 payload = msg.get_payload(decode=True)
470 # for notification prefer readability over data precision
469 # for notification prefer readability over data precision
471 msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
470 msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
472 # reinstate custom headers
471 # reinstate custom headers
@@ -525,7 +524,7 b' class notifier(object):'
525 )
524 )
526 msg['To'] = ', '.join(sorted(subs))
525 msg['To'] = ', '.join(sorted(subs))
527
526
528 msgtext = msg.as_bytes() if pycompat.ispy3 else msg.as_string()
527 msgtext = msg.as_bytes()
529 if self.test:
528 if self.test:
530 self.ui.write(msgtext)
529 self.ui.write(msgtext)
531 if not msgtext.endswith(b'\n'):
530 if not msgtext.endswith(b'\n'):
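
With Python 2 gone, notify always works in bytes: get_payload(decode=True) yields the decoded payload as bytes and as_bytes() serializes the whole message, so the pycompat.ispy3 switches vanish. A self-contained stdlib sketch, not the notifier's own message object:

from email.message import EmailMessage

msg = EmailMessage()
msg['Subject'] = 'changeset in repo'
msg.set_content('3 new changesets pushed')

payload = msg.get_payload(decode=True)   # bytes, decoded per the transfer encoding
data = msg.as_bytes()                    # the full message, ready to write or send
assert isinstance(payload, bytes) and isinstance(data, bytes)
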
@@ -21,7 +21,6 b' takes precedence over ignore options and'
21 [pager]
21 [pager]
22 attend-cat = false
22 attend-cat = false
23 '''
23 '''
24 from __future__ import absolute_import
25
24
26 from mercurial import (
25 from mercurial import (
27 cmdutil,
26 cmdutil,
@@ -71,13 +71,11 b' specified by --flag option are exported '
71 You can set patchbomb to always ask for confirmation by setting
71 You can set patchbomb to always ask for confirmation by setting
72 ``patchbomb.confirm`` to true.
72 ``patchbomb.confirm`` to true.
73 '''
73 '''
74 from __future__ import absolute_import
75
74
76 import email.encoders as emailencoders
75 import email.encoders as emailencoders
77 import email.mime.base as emimebase
76 import email.mime.base as emimebase
78 import email.mime.multipart as emimemultipart
77 import email.mime.multipart as emimemultipart
79 import email.utils as eutil
78 import email.utils as eutil
80 import errno
81 import os
79 import os
82 import socket
80 import socket
83
81
@@ -985,9 +983,8 b' def email(ui, repo, *revs, **opts):'
985 try:
983 try:
986 generator.flatten(m, False)
984 generator.flatten(m, False)
987 ui.write(b'\n')
985 ui.write(b'\n')
988 except IOError as inst:
986 except BrokenPipeError:
989 if inst.errno != errno.EPIPE:
987 pass
990 raise
991 else:
988 else:
992 if not sendmail:
989 if not sendmail:
993 sendmail = mail.connect(ui, mbox=mbox)
990 sendmail = mail.connect(ui, mbox=mbox)
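
The same errno simplification applies to pipes: an IOError filtered on errno.EPIPE becomes except BrokenPipeError, which Python 3 raises when the reading end (here the output stream or a pager) has gone away. A hedged sketch of the shape, with made-up names:

def flatten_messages(fp, messages):
    try:
        for m in messages:
            fp.write(m)
        fp.flush()
    except BrokenPipeError:
        # Old form re-raised unless inst.errno == errno.EPIPE.
        pass
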
@@ -57,11 +57,11 b' Config::'
57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
57 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
58 """
58 """
59
59
60 from __future__ import absolute_import
61
60
62 import base64
61 import base64
63 import contextlib
62 import contextlib
64 import hashlib
63 import hashlib
64 import io
65 import itertools
65 import itertools
66 import json
66 import json
67 import mimetypes
67 import mimetypes
@@ -219,9 +219,7 b' def _loadhgrc(orig, ui, wdirvfs, hgvfs, '
219 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
219 rawparams = encoding.unifromlocal(wdirvfs.read(b".arcconfig"))
220 # json.loads only returns unicode strings
220 # json.loads only returns unicode strings
221 arcconfig = pycompat.rapply(
221 arcconfig = pycompat.rapply(
222 lambda x: encoding.unitolocal(x)
222 lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
223 if isinstance(x, pycompat.unicode)
224 else x,
225 pycompat.json_loads(rawparams),
223 pycompat.json_loads(rawparams),
226 )
224 )
227
225
@@ -447,9 +445,7 b' def callconduit(ui, name, params):'
447 time.sleep(retry_interval)
445 time.sleep(retry_interval)
448 ui.debug(b'Conduit Response: %s\n' % body)
446 ui.debug(b'Conduit Response: %s\n' % body)
449 parsed = pycompat.rapply(
447 parsed = pycompat.rapply(
450 lambda x: encoding.unitolocal(x)
448 lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
451 if isinstance(x, pycompat.unicode)
452 else x,
453 # json.loads only accepts bytes from py3.6+
449 # json.loads only accepts bytes from py3.6+
454 pycompat.json_loads(encoding.unifromlocal(body)),
450 pycompat.json_loads(encoding.unifromlocal(body)),
455 )
451 )
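
json.loads on Python 3 always produces str, while Mercurial's internals want bytes, so the converter now tests isinstance(x, str) instead of pycompat.unicode. A self-contained sketch of the same recursive bytes-ification, without the pycompat.rapply and encoding.unitolocal helpers used above:

import json

def bytesify(obj):
    # Walk the decoded JSON and utf-8 encode every text string.
    if isinstance(obj, str):
        return obj.encode('utf-8')
    if isinstance(obj, list):
        return [bytesify(x) for x in obj]
    if isinstance(obj, dict):
        return {bytesify(k): bytesify(v) for k, v in obj.items()}
    return obj

bytesify(json.loads('{"callsign": "HG", "ids": ["PHID-1", "PHID-2"]}'))
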
@@ -473,9 +469,7 b' def debugcallconduit(ui, repo, name):'
473 rawparams = encoding.unifromlocal(ui.fin.read())
469 rawparams = encoding.unifromlocal(ui.fin.read())
474 # json.loads only returns unicode strings
470 # json.loads only returns unicode strings
475 params = pycompat.rapply(
471 params = pycompat.rapply(
476 lambda x: encoding.unitolocal(x)
472 lambda x: encoding.unitolocal(x) if isinstance(x, str) else x,
477 if isinstance(x, pycompat.unicode)
478 else x,
479 pycompat.json_loads(rawparams),
473 pycompat.json_loads(rawparams),
480 )
474 )
481 # json.dumps only accepts unicode strings
475 # json.dumps only accepts unicode strings
@@ -674,7 +668,7 b' def getdiff(basectx, ctx, diffopts):'
674 return output.getvalue()
668 return output.getvalue()
675
669
676
670
677 class DiffChangeType(object):
671 class DiffChangeType:
678 ADD = 1
672 ADD = 1
679 CHANGE = 2
673 CHANGE = 2
680 DELETE = 3
674 DELETE = 3
@@ -685,7 +679,7 b' class DiffChangeType(object):'
685 MULTICOPY = 8
679 MULTICOPY = 8
686
680
687
681
688 class DiffFileType(object):
682 class DiffFileType:
689 TEXT = 1
683 TEXT = 1
690 IMAGE = 2
684 IMAGE = 2
691 BINARY = 3
685 BINARY = 3
@@ -706,7 +700,7 b' class phabhunk(dict):'
706
700
707
701
708 @attr.s
702 @attr.s
709 class phabchange(object):
703 class phabchange:
710 """Represents a Differential change, owns Differential hunks and owned by a
704 """Represents a Differential change, owns Differential hunks and owned by a
711 Differential diff. Each one represents one file in a diff.
705 Differential diff. Each one represents one file in a diff.
712 """
706 """
@@ -747,7 +741,7 b' class phabchange(object):'
747
741
748
742
749 @attr.s
743 @attr.s
750 class phabdiff(object):
744 class phabdiff:
751 """Represents a Differential diff, owns Differential changes. Corresponds
745 """Represents a Differential diff, owns Differential changes. Corresponds
752 to a commit.
746 to a commit.
753 """
747 """
@@ -2200,7 +2194,7 b' def phabimport(ui, repo, *specs, **opts)'
2200 for drev, contents in patches:
2194 for drev, contents in patches:
2201 ui.status(_(b'applying patch from D%s\n') % drev)
2195 ui.status(_(b'applying patch from D%s\n') % drev)
2202
2196
2203 with patch.extract(ui, pycompat.bytesio(contents)) as patchdata:
2197 with patch.extract(ui, io.BytesIO(contents)) as patchdata:
2204 msg, node, rej = cmdutil.tryimportone(
2198 msg, node, rej = cmdutil.tryimportone(
2205 ui,
2199 ui,
2206 repo,
2200 repo,
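
pycompat.bytesio was simply an alias for io.BytesIO, so the module gains an import io (added in the earlier hunk of this file) and the call site uses the stdlib class directly. A brief sketch:

import io

contents = b'# HG changeset patch\n# User test\ndiff --git a/a b/a\n'
fp = io.BytesIO(contents)     # previously pycompat.bytesio(contents)
assert fp.readline() == b'# HG changeset patch\n'
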
@@ -2279,7 +2273,7 b' def phabupdate(ui, repo, *specs, **opts)'
2279 drevmap = getdrevmap(repo, logcmdutil.revrange(repo, [revs]))
2273 drevmap = getdrevmap(repo, logcmdutil.revrange(repo, [revs]))
2280 specs = []
2274 specs = []
2281 unknown = []
2275 unknown = []
2282 for r, d in pycompat.iteritems(drevmap):
2276 for r, d in drevmap.items():
2283 if d is None:
2277 if d is None:
2284 unknown.append(repo[r])
2278 unknown.append(repo[r])
2285 else:
2279 else:
@@ -2364,7 +2358,7 b' def phabstatusshowview(ui, repo, display'
2364 revs = repo.revs('sort(_underway(), topo)')
2358 revs = repo.revs('sort(_underway(), topo)')
2365 drevmap = getdrevmap(repo, revs)
2359 drevmap = getdrevmap(repo, revs)
2366 unknownrevs, drevids, revsbydrevid = [], set(), {}
2360 unknownrevs, drevids, revsbydrevid = [], set(), {}
2367 for rev, drevid in pycompat.iteritems(drevmap):
2361 for rev, drevid in drevmap.items():
2368 if drevid is not None:
2362 if drevid is not None:
2369 drevids.add(drevid)
2363 drevids.add(drevid)
2370 revsbydrevid.setdefault(drevid, set()).add(rev)
2364 revsbydrevid.setdefault(drevid, set()).add(rev)
@@ -14,9 +14,7 b' For more information:'
14 https://mercurial-scm.org/wiki/RebaseExtension
14 https://mercurial-scm.org/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from __future__ import absolute_import
18
17
19 import errno
20 import os
18 import os
21
19
22 from mercurial.i18n import _
20 from mercurial.i18n import _
@@ -160,7 +158,7 b' def _ctxdesc(ctx):'
160 )
158 )
161
159
162
160
163 class rebaseruntime(object):
161 class rebaseruntime:
164 """This class is a container for rebase runtime state"""
162 """This class is a container for rebase runtime state"""
165
163
166 def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
164 def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
@@ -244,7 +242,7 b' class rebaseruntime(object):'
244 f.write(b'%d\n' % int(self.keepbranchesf))
242 f.write(b'%d\n' % int(self.keepbranchesf))
245 f.write(b'%s\n' % (self.activebookmark or b''))
243 f.write(b'%s\n' % (self.activebookmark or b''))
246 destmap = self.destmap
244 destmap = self.destmap
247 for d, v in pycompat.iteritems(self.state):
245 for d, v in self.state.items():
248 oldrev = repo[d].hex()
246 oldrev = repo[d].hex()
249 if v >= 0:
247 if v >= 0:
250 newrev = repo[v].hex()
248 newrev = repo[v].hex()
@@ -506,7 +504,7 b' class rebaseruntime(object):'
506 # commits.
504 # commits.
507 self.storestatus(tr)
505 self.storestatus(tr)
508
506
509 cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
507 cands = [k for k, v in self.state.items() if v == revtodo]
510 p = repo.ui.makeprogress(
508 p = repo.ui.makeprogress(
511 _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
509 _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
512 )
510 )
@@ -1337,7 +1335,7 b' def _definedestmap(ui, repo, inmemory, d'
1337 # emulate the old behavior, showing "nothing to rebase" (a better
1335 # emulate the old behavior, showing "nothing to rebase" (a better
1338 # behavior may be abort with "cannot find branching point" error)
1336 # behavior may be abort with "cannot find branching point" error)
1339 bpbase.clear()
1337 bpbase.clear()
1340 for bp, bs in pycompat.iteritems(bpbase): # calculate roots
1338 for bp, bs in bpbase.items(): # calculate roots
1341 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1339 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1342
1340
1343 rebaseset = repo.revs(b'%ld::', roots)
1341 rebaseset = repo.revs(b'%ld::', roots)
@@ -1941,9 +1939,7 b' def restorecollapsemsg(repo, isabort):'
1941 f = repo.vfs(b"last-message.txt")
1939 f = repo.vfs(b"last-message.txt")
1942 collapsemsg = f.readline().strip()
1940 collapsemsg = f.readline().strip()
1943 f.close()
1941 f.close()
1944 except IOError as err:
1942 except FileNotFoundError:
1945 if err.errno != errno.ENOENT:
1946 raise
1947 if isabort:
1943 if isabort:
1948 # Oh well, just abort like normal
1944 # Oh well, just abort like normal
1949 collapsemsg = b''
1945 collapsemsg = b''
@@ -2104,7 +2100,7 b' def clearrebased('
2104 fl = fm.formatlist
2100 fl = fm.formatlist
2105 fd = fm.formatdict
2101 fd = fm.formatdict
2106 changes = {}
2102 changes = {}
2107 for oldns, newn in pycompat.iteritems(replacements):
2103 for oldns, newn in replacements.items():
2108 for oldn in oldns:
2104 for oldn in oldns:
2109 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2105 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2110 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2106 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
@@ -2258,7 +2254,7 b' def summaryhook(ui, repo):'
2258 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2254 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2259 ui.write(msg)
2255 ui.write(msg)
2260 return
2256 return
2261 numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
2257 numrebased = len([i for i in state.values() if i >= 0])
2262 # i18n: column positioning for "hg summary"
2258 # i18n: column positioning for "hg summary"
2263 ui.write(
2259 ui.write(
2264 _(b'rebase: %s, %s (rebase --continue)\n')
2260 _(b'rebase: %s, %s (rebase --continue)\n')
@@ -10,7 +10,6 b''
10 The feature provided by this extension has been moved into core Mercurial as
10 The feature provided by this extension has been moved into core Mercurial as
11 :hg:`commit --interactive`.'''
11 :hg:`commit --interactive`.'''
12
12
13 from __future__ import absolute_import
14
13
15 from mercurial.i18n import _
14 from mercurial.i18n import _
16 from mercurial import (
15 from mercurial import (
@@ -11,10 +11,8 b' The :hg:`releasenotes` command provided '
11 process simpler by automating it.
11 process simpler by automating it.
12 """
12 """
13
13
14 from __future__ import absolute_import
15
14
16 import difflib
15 import difflib
17 import errno
18 import re
16 import re
19
17
20 from mercurial.i18n import _
18 from mercurial.i18n import _
@@ -78,7 +76,7 b" RE_ISSUE = br'\\bissue ?[0-9]{4,6}(?![0-9"
78 BULLET_SECTION = _(b'Other Changes')
76 BULLET_SECTION = _(b'Other Changes')
79
77
80
78
81 class parsedreleasenotes(object):
79 class parsedreleasenotes:
82 def __init__(self):
80 def __init__(self):
83 self.sections = {}
81 self.sections = {}
84
82
@@ -171,14 +169,14 b' class parsedreleasenotes(object):'
171 self.addnontitleditem(section, paragraphs)
169 self.addnontitleditem(section, paragraphs)
172
170
173
171
174 class releasenotessections(object):
172 class releasenotessections:
175 def __init__(self, ui, repo=None):
173 def __init__(self, ui, repo=None):
176 if repo:
174 if repo:
177 sections = util.sortdict(DEFAULT_SECTIONS)
175 sections = util.sortdict(DEFAULT_SECTIONS)
178 custom_sections = getcustomadmonitions(repo)
176 custom_sections = getcustomadmonitions(repo)
179 if custom_sections:
177 if custom_sections:
180 sections.update(custom_sections)
178 sections.update(custom_sections)
181 self._sections = list(pycompat.iteritems(sections))
179 self._sections = list(sections.items())
182 else:
180 else:
183 self._sections = list(DEFAULT_SECTIONS)
181 self._sections = list(DEFAULT_SECTIONS)
184
182
@@ -689,10 +687,7 b' def releasenotes(ui, repo, file_=None, *'
689 try:
687 try:
690 with open(file_, b'rb') as fh:
688 with open(file_, b'rb') as fh:
691 notes = parsereleasenotesfile(sections, fh.read())
689 notes = parsereleasenotesfile(sections, fh.read())
692 except IOError as e:
690 except FileNotFoundError:
693 if e.errno != errno.ENOENT:
694 raise
695
696 notes = parsedreleasenotes()
691 notes = parsedreleasenotes()
697
692
698 notes.merge(ui, incoming)
693 notes.merge(ui, incoming)
@@ -6,7 +6,6 b''
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """recreates hardlinks between repository clones"""
8 """recreates hardlinks between repository clones"""
9 from __future__ import absolute_import
10
9
11 import os
10 import os
12 import stat
11 import stat
@@ -124,7 +124,6 b' Configs:'
124 corruption before returning metadata
124 corruption before returning metadata
125
125
126 """
126 """
127 from __future__ import absolute_import
128
127
129 import os
128 import os
130 import time
129 import time
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import collections
1 import collections
4 import errno
2 import errno
5 import mmap
3 import mmap
@@ -15,7 +13,6 b' from mercurial.pycompat import ('
15 from mercurial.node import hex
13 from mercurial.node import hex
16 from mercurial import (
14 from mercurial import (
17 policy,
15 policy,
18 pycompat,
19 util,
16 util,
20 vfs as vfsmod,
17 vfs as vfsmod,
21 )
18 )
@@ -56,16 +53,8 b' SMALLFANOUTCUTOFF = 2 ** 16 // 8'
56 # loaded the pack list.
53 # loaded the pack list.
57 REFRESHRATE = 0.1
54 REFRESHRATE = 0.1
58
55
59 if pycompat.isposix and not pycompat.ispy3:
60 # With glibc 2.7+ the 'e' flag uses O_CLOEXEC when opening.
61 # The 'e' flag will be ignored on older versions of glibc.
62 # Python 3 can't handle the 'e' flag.
63 PACKOPENMODE = b'rbe'
64 else:
65 PACKOPENMODE = b'rb'
66
56
67
68 class _cachebackedpacks(object):
57 class _cachebackedpacks:
69 def __init__(self, packs, cachesize):
58 def __init__(self, packs, cachesize):
70 self._packs = set(packs)
59 self._packs = set(packs)
71 self._lrucache = util.lrucachedict(cachesize)
60 self._lrucache = util.lrucachedict(cachesize)
@@ -111,7 +100,7 b' class _cachebackedpacks(object):'
111 self._lastpack = None
100 self._lastpack = None
112
101
113
102
114 class basepackstore(object):
103 class basepackstore:
115 # Default cache size limit for the pack files.
104 # Default cache size limit for the pack files.
116 DEFAULTCACHESIZE = 100
105 DEFAULTCACHESIZE = 100
117
106
@@ -177,9 +166,8 b' class basepackstore(object):'
177 )
166 )
178 else:
167 else:
179 ids.add(id)
168 ids.add(id)
180 except OSError as ex:
169 except FileNotFoundError:
181 if ex.errno != errno.ENOENT:
170 pass
182 raise
183
171
184 def _getavailablepackfilessorted(self):
172 def _getavailablepackfilessorted(self):
185 """Like `_getavailablepackfiles`, but also sorts the files by mtime,
173 """Like `_getavailablepackfiles`, but also sorts the files by mtime,
@@ -269,7 +257,7 b' class basepackstore(object):'
269 return newpacks
257 return newpacks
270
258
271
259
272 class versionmixin(object):
260 class versionmixin:
273 # Mix-in for classes with multiple supported versions
261 # Mix-in for classes with multiple supported versions
274 VERSION = None
262 VERSION = None
275 SUPPORTED_VERSIONS = [2]
263 SUPPORTED_VERSIONS = [2]
@@ -320,7 +308,7 b' class basepack(versionmixin):'
320 params = self.params
308 params = self.params
321 rawfanout = self._index[FANOUTSTART : FANOUTSTART + params.fanoutsize]
309 rawfanout = self._index[FANOUTSTART : FANOUTSTART + params.fanoutsize]
322 fanouttable = []
310 fanouttable = []
323 for i in pycompat.xrange(0, params.fanoutcount):
311 for i in range(0, params.fanoutcount):
324 loc = i * 4
312 loc = i * 4
325 fanoutentry = struct.unpack(b'!I', rawfanout[loc : loc + 4])[0]
313 fanoutentry = struct.unpack(b'!I', rawfanout[loc : loc + 4])[0]
326 fanouttable.append(fanoutentry)
314 fanouttable.append(fanoutentry)
@@ -345,12 +333,12 b' class basepack(versionmixin):'
345 self._data.close()
333 self._data.close()
346
334
347 # TODO: use an opener/vfs to access these paths
335 # TODO: use an opener/vfs to access these paths
348 with open(self.indexpath, PACKOPENMODE) as indexfp:
336 with open(self.indexpath, b'rb') as indexfp:
349 # memory-map the file, size 0 means whole file
337 # memory-map the file, size 0 means whole file
350 self._index = mmap.mmap(
338 self._index = mmap.mmap(
351 indexfp.fileno(), 0, access=mmap.ACCESS_READ
339 indexfp.fileno(), 0, access=mmap.ACCESS_READ
352 )
340 )
353 with open(self.packpath, PACKOPENMODE) as datafp:
341 with open(self.packpath, b'rb') as datafp:
354 self._data = mmap.mmap(datafp.fileno(), 0, access=mmap.ACCESS_READ)
342 self._data = mmap.mmap(datafp.fileno(), 0, access=mmap.ACCESS_READ)
355
343
356 self._pagedin = 0
344 self._pagedin = 0
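
The removed PACKOPENMODE block above existed only because glibc's 'e' (O_CLOEXEC) open flag is rejected by Python 3's open(), so both pack files are now opened with a plain 'rb' before being memory-mapped. A minimal standalone sketch of the same read-only mapping, with a temporary file standing in for a pack file:

import mmap
import tempfile

with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b'\x00' * 16)
    path = tmp.name

with open(path, 'rb') as fp:          # previously open(path, PACKOPENMODE)
    data = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ)

assert data[:4] == b'\x00\x00\x00\x00'
data.close()
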
@@ -528,7 +516,7 b' class mutablebasepack(versionmixin):'
528 self.idxfp.write(struct.pack(b'!BB', self.VERSION, config))
516 self.idxfp.write(struct.pack(b'!BB', self.VERSION, config))
529
517
530
518
531 class indexparams(object):
519 class indexparams:
532 __slots__ = (
520 __slots__ = (
533 'fanoutprefix',
521 'fanoutprefix',
534 'fanoutstruct',
522 'fanoutstruct',
@@ -1,6 +1,3 b''
1 from __future__ import absolute_import
2
3 import errno
4 import os
1 import os
5 import shutil
2 import shutil
6 import stat
3 import stat
@@ -21,7 +18,7 b' from . import ('
21 )
18 )
22
19
23
20
24 class basestore(object):
21 class basestore:
25 def __init__(self, repo, path, reponame, shared=False):
22 def __init__(self, repo, path, reponame, shared=False):
26 """Creates a remotefilelog store object for the given repo name.
23 """Creates a remotefilelog store object for the given repo name.
27
24
@@ -148,7 +145,7 b' class basestore(object):'
148
145
149 filenamemap = self._resolvefilenames(existing.keys())
146 filenamemap = self._resolvefilenames(existing.keys())
150
147
151 for filename, sha in pycompat.iteritems(filenamemap):
148 for filename, sha in filenamemap.items():
152 yield (filename, existing[sha])
149 yield (filename, existing[sha])
153
150
154 def _resolvefilenames(self, hashes):
151 def _resolvefilenames(self, hashes):
@@ -173,7 +170,7 b' class basestore(object):'
173
170
174 # Scan the changelog until we've found every file name
171 # Scan the changelog until we've found every file name
175 cl = self.repo.unfiltered().changelog
172 cl = self.repo.unfiltered().changelog
176 for rev in pycompat.xrange(len(cl) - 1, -1, -1):
173 for rev in range(len(cl) - 1, -1, -1):
177 if not missingfilename:
174 if not missingfilename:
178 break
175 break
179 files = cl.readfiles(cl.node(rev))
176 files = cl.readfiles(cl.node(rev))
@@ -346,10 +343,7 b' class basestore(object):'
346 count += 1
343 count += 1
347 try:
344 try:
348 pathstat = os.stat(path)
345 pathstat = os.stat(path)
349 except OSError as e:
346 except FileNotFoundError:
350 # errno.ENOENT = no such file or directory
351 if e.errno != errno.ENOENT:
352 raise
353 msg = _(
347 msg = _(
354 b"warning: file %s was removed by another process\n"
348 b"warning: file %s was removed by another process\n"
355 )
349 )
@@ -364,10 +358,7 b' class basestore(object):'
364 else:
358 else:
365 try:
359 try:
366 shallowutil.unlinkfile(path)
360 shallowutil.unlinkfile(path)
367 except OSError as e:
361 except FileNotFoundError:
368 # errno.ENOENT = no such file or directory
369 if e.errno != errno.ENOENT:
370 raise
371 msg = _(
362 msg = _(
372 b"warning: file %s was removed by another "
363 b"warning: file %s was removed by another "
373 b"process\n"
364 b"process\n"
@@ -390,10 +381,7 b' class basestore(object):'
390 atime, oldpath, oldpathstat = queue.get()
381 atime, oldpath, oldpathstat = queue.get()
391 try:
382 try:
392 shallowutil.unlinkfile(oldpath)
383 shallowutil.unlinkfile(oldpath)
393 except OSError as e:
384 except FileNotFoundError:
394 # errno.ENOENT = no such file or directory
395 if e.errno != errno.ENOENT:
396 raise
397 msg = _(
385 msg = _(
398 b"warning: file %s was removed by another process\n"
386 b"warning: file %s was removed by another process\n"
399 )
387 )
@@ -414,7 +402,7 b' class basestore(object):'
414 )
402 )
415
403
416
404
417 class baseunionstore(object):
405 class baseunionstore:
418 def __init__(self, *args, **kwargs):
406 def __init__(self, *args, **kwargs):
419 # If one of the functions that iterates all of the stores is about to
407 # If one of the functions that iterates all of the stores is about to
420 # throw a KeyError, try this many times with a full refresh between
408 # throw a KeyError, try this many times with a full refresh between
@@ -5,11 +5,9 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from mercurial import (
9 from mercurial import (
11 hg,
10 hg,
12 pycompat,
13 sshpeer,
11 sshpeer,
14 util,
12 util,
15 )
13 )
@@ -17,7 +15,7 b' from mercurial import ('
17 _sshv1peer = sshpeer.sshv1peer
15 _sshv1peer = sshpeer.sshv1peer
18
16
19
17
20 class connectionpool(object):
18 class connectionpool:
21 def __init__(self, repo):
19 def __init__(self, repo):
22 self._repo = repo
20 self._repo = repo
23 self._pool = dict()
21 self._pool = dict()
@@ -61,13 +59,13 b' class connectionpool(object):'
61 return conn
59 return conn
62
60
63 def close(self):
61 def close(self):
64 for pathpool in pycompat.itervalues(self._pool):
62 for pathpool in self._pool.values():
65 for conn in pathpool:
63 for conn in pathpool:
66 conn.close()
64 conn.close()
67 del pathpool[:]
65 del pathpool[:]
68
66
69
67
70 class connection(object):
68 class connection:
71 def __init__(self, pool, peer):
69 def __init__(self, pool, peer):
72 self._pool = pool
70 self._pool = pool
73 self.peer = peer
71 self.peer = peer
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import struct
1 import struct
4
2
5 from mercurial.i18n import _
3 from mercurial.i18n import _
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import threading
1 import threading
4
2
5 from mercurial.node import (
3 from mercurial.node import (
@@ -9,7 +7,6 b' from mercurial.node import ('
9 from mercurial.pycompat import getattr
7 from mercurial.pycompat import getattr
10 from mercurial import (
8 from mercurial import (
11 mdiff,
9 mdiff,
12 pycompat,
13 revlog,
10 revlog,
14 )
11 )
15 from . import (
12 from . import (
@@ -19,7 +16,7 b' from . import ('
19 )
16 )
20
17
21
18
22 class ChainIndicies(object):
19 class ChainIndicies:
23 """A static class for easy reference to the delta chain indicies."""
20 """A static class for easy reference to the delta chain indicies."""
24
21
25 # The filename of this revision delta
22 # The filename of this revision delta
@@ -231,7 +228,7 b' class remotefilelogcontentstore(basestor'
231 self._threaddata.metacache = (node, meta)
228 self._threaddata.metacache = (node, meta)
232
229
233
230
234 class remotecontentstore(object):
231 class remotecontentstore:
235 def __init__(self, ui, fileservice, shared):
232 def __init__(self, ui, fileservice, shared):
236 self._fileservice = fileservice
233 self._fileservice = fileservice
237 # type(shared) is usually remotefilelogcontentstore
234 # type(shared) is usually remotefilelogcontentstore
@@ -276,7 +273,7 b' class remotecontentstore(object):'
276 pass
273 pass
277
274
278
275
279 class manifestrevlogstore(object):
276 class manifestrevlogstore:
280 def __init__(self, repo):
277 def __init__(self, repo):
281 self._store = repo.store
278 self._store = repo.store
282 self._svfs = repo.svfs
279 self._svfs = repo.svfs
@@ -368,7 +365,7 b' class manifestrevlogstore(object):'
368 rl = revlog.revlog(self._svfs, radix=b'00manifesttree')
365 rl = revlog.revlog(self._svfs, radix=b'00manifesttree')
369 startlinkrev = self._repackstartlinkrev
366 startlinkrev = self._repackstartlinkrev
370 endlinkrev = self._repackendlinkrev
367 endlinkrev = self._repackendlinkrev
371 for rev in pycompat.xrange(len(rl) - 1, -1, -1):
368 for rev in range(len(rl) - 1, -1, -1):
372 linkrev = rl.linkrev(rev)
369 linkrev = rl.linkrev(rev)
373 if linkrev < startlinkrev:
370 if linkrev < startlinkrev:
374 break
371 break
@@ -385,7 +382,7 b' class manifestrevlogstore(object):'
385 treename = path[5 : -len(b'/00manifest')]
382 treename = path[5 : -len(b'/00manifest')]
386
383
387 rl = revlog.revlog(self._svfs, indexfile=path[:-2])
384 rl = revlog.revlog(self._svfs, indexfile=path[:-2])
388 for rev in pycompat.xrange(len(rl) - 1, -1, -1):
385 for rev in range(len(rl) - 1, -1, -1):
389 linkrev = rl.linkrev(rev)
386 linkrev = rl.linkrev(rev)
390 if linkrev < startlinkrev:
387 if linkrev < startlinkrev:
391 break
388 break
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import struct
1 import struct
4 import zlib
2 import zlib
5
3
@@ -9,7 +7,6 b' from mercurial.node import ('
9 )
7 )
10 from mercurial.i18n import _
8 from mercurial.i18n import _
11 from mercurial import (
9 from mercurial import (
12 pycompat,
13 util,
10 util,
14 )
11 )
15 from . import (
12 from . import (
@@ -234,7 +231,7 b' class datapack(basepack.basepack):'
234
231
235 # Scan forward to find the first non-same entry, which is the upper
232 # Scan forward to find the first non-same entry, which is the upper
236 # bound.
233 # bound.
237 for i in pycompat.xrange(fanoutkey + 1, params.fanoutcount):
234 for i in range(fanoutkey + 1, params.fanoutcount):
238 end = fanout[i] + params.indexstart
235 end = fanout[i] + params.indexstart
239 if end != start:
236 if end != start:
240 break
237 break
@@ -455,7 +452,7 b' class mutabledatapack(basepack.mutableba'
455
452
456 def createindex(self, nodelocations, indexoffset):
453 def createindex(self, nodelocations, indexoffset):
457 entries = sorted(
454 entries = sorted(
458 (n, db, o, s) for n, (db, o, s) in pycompat.iteritems(self.entries)
455 (n, db, o, s) for n, (db, o, s) in self.entries.items()
459 )
456 )
460
457
461 rawindex = b''
458 rawindex = b''
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import os
8 import os
10 import zlib
9 import zlib
@@ -82,7 +81,7 b' def buildtemprevlog(repo, file):'
82 os.remove(temppath)
81 os.remove(temppath)
83 r = filelog.filelog(repo.svfs, b'temprevlog')
82 r = filelog.filelog(repo.svfs, b'temprevlog')
84
83
85 class faket(object):
84 class faket:
86 def add(self, a, b, c):
85 def add(self, a, b, c):
87 pass
86 pass
88
87
@@ -211,7 +210,7 b' def verifyremotefilelog(ui, path, **opts'
211 continue
210 continue
212 filepath = os.path.join(root, file)
211 filepath = os.path.join(root, file)
213 size, firstnode, mapping = parsefileblob(filepath, decompress)
212 size, firstnode, mapping = parsefileblob(filepath, decompress)
214 for p1, p2, linknode, copyfrom in pycompat.itervalues(mapping):
213 for p1, p2, linknode, copyfrom in mapping.values():
215 if linknode == sha1nodeconstants.nullid:
214 if linknode == sha1nodeconstants.nullid:
216 actualpath = os.path.relpath(root, path)
215 actualpath = os.path.relpath(root, path)
217 key = fileserverclient.getcachekey(
216 key = fileserverclient.getcachekey(
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import io
9 import io
11 import os
10 import os
@@ -140,7 +139,7 b' def peersetup(ui, peer):'
140 peer.__class__ = remotefilepeer
139 peer.__class__ = remotefilepeer
141
140
142
141
143 class cacheconnection(object):
142 class cacheconnection:
144 """The connection for communicating with the remote cache. Performs
143 """The connection for communicating with the remote cache. Performs
145 gets and sets by communicating with an external process that has the
144 gets and sets by communicating with an external process that has the
146 cache-specific implementation.
145 cache-specific implementation.
@@ -303,7 +302,7 b' def _getfiles_threaded('
303 pipeo.flush()
302 pipeo.flush()
304
303
305
304
306 class fileserverclient(object):
305 class fileserverclient:
307 """A client for requesting files from the remote file server."""
306 """A client for requesting files from the remote file server."""
308
307
309 def __init__(self, repo):
308 def __init__(self, repo):
@@ -518,7 +517,7 b' class fileserverclient(object):'
518 # returns cache misses. This enables tests to run easily
517 # returns cache misses. This enables tests to run easily
519 # and may eventually allow us to be a drop in replacement
518 # and may eventually allow us to be a drop in replacement
520 # for the largefiles extension.
519 # for the largefiles extension.
521 class simplecache(object):
520 class simplecache:
522 def __init__(self):
521 def __init__(self):
523 self.missingids = []
522 self.missingids = []
524 self.connected = True
523 self.connected = True
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import struct
1 import struct
4
2
5 from mercurial.node import (
3 from mercurial.node import (
@@ -7,7 +5,6 b' from mercurial.node import ('
7 sha1nodeconstants,
5 sha1nodeconstants,
8 )
6 )
9 from mercurial import (
7 from mercurial import (
10 pycompat,
11 util,
8 util,
12 )
9 )
13 from mercurial.utils import hashutil
10 from mercurial.utils import hashutil
@@ -209,7 +206,7 b' class historypack(basepack.basepack):'
209 start = fanout[fanoutkey] + params.indexstart
206 start = fanout[fanoutkey] + params.indexstart
210 indexend = self._indexend
207 indexend = self._indexend
211
208
212 for i in pycompat.xrange(fanoutkey + 1, params.fanoutcount):
209 for i in range(fanoutkey + 1, params.fanoutcount):
213 end = fanout[i] + params.indexstart
210 end = fanout[i] + params.indexstart
214 if end != start:
211 if end != start:
215 break
212 break
@@ -325,7 +322,7 b' class historypack(basepack.basepack):'
325 )[0]
322 )[0]
326 offset += ENTRYCOUNTSIZE
323 offset += ENTRYCOUNTSIZE
327
324
328 for i in pycompat.xrange(revcount):
325 for i in range(revcount):
329 entry = struct.unpack(
326 entry = struct.unpack(
330 PACKFORMAT, data[offset : offset + PACKENTRYLENGTH]
327 PACKFORMAT, data[offset : offset + PACKENTRYLENGTH]
331 )
328 )
@@ -521,7 +518,7 b' class mutablehistorypack(basepack.mutabl'
521
518
522 files = (
519 files = (
523 (hashutil.sha1(filename).digest(), filename, offset, size)
520 (hashutil.sha1(filename).digest(), filename, offset, size)
524 for filename, (offset, size) in pycompat.iteritems(self.files)
521 for filename, (offset, size) in self.files.items()
525 )
522 )
526 files = sorted(files)
523 files = sorted(files)
527
524
@@ -557,7 +554,7 b' class mutablehistorypack(basepack.mutabl'
557 )
554 )
558 nodeindexoffset += constants.FILENAMESIZE + len(filename)
555 nodeindexoffset += constants.FILENAMESIZE + len(filename)
559
556
560 for node, location in sorted(pycompat.iteritems(nodelocations)):
557 for node, location in sorted(nodelocations.items()):
561 nodeindexentries.append(
558 nodeindexentries.append(
562 struct.pack(nodeindexformat, node, location)
559 struct.pack(nodeindexformat, node, location)
563 )
560 )
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 from mercurial.node import (
1 from mercurial.node import (
4 hex,
2 hex,
5 sha1nodeconstants,
3 sha1nodeconstants,
@@ -143,7 +141,7 b' class remotefilelogmetadatastore(basesto'
143 )
141 )
144
142
145
143
146 class remotemetadatastore(object):
144 class remotemetadatastore:
147 def __init__(self, ui, fileservice, shared):
145 def __init__(self, ui, fileservice, shared):
148 self._fileservice = fileservice
146 self._fileservice = fileservice
149 self._shared = shared
147 self._shared = shared
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import collections
8 import collections
10 import time
9 import time
@@ -5,7 +5,6 b''
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 from __future__ import absolute_import
9
8
10 import collections
9 import collections
11 import os
10 import os
@@ -16,7 +15,6 b' from mercurial import ('
16 ancestor,
15 ancestor,
17 error,
16 error,
18 mdiff,
17 mdiff,
19 pycompat,
20 revlog,
18 revlog,
21 )
19 )
22 from mercurial.utils import storageutil
20 from mercurial.utils import storageutil
@@ -29,7 +27,7 b' from . import ('
29 )
27 )
30
28
31
29
32 class remotefilelognodemap(object):
30 class remotefilelognodemap:
33 def __init__(self, filename, store):
31 def __init__(self, filename, store):
34 self._filename = filename
32 self._filename = filename
35 self._store = store
33 self._store = store
@@ -44,7 +42,7 b' class remotefilelognodemap(object):'
44 return node
42 return node
45
43
46
44
47 class remotefilelog(object):
45 class remotefilelog:
48
46
49 _generaldelta = True
47 _generaldelta = True
50 _flagserrorclass = error.RevlogError
48 _flagserrorclass = error.RevlogError
@@ -424,7 +422,7 b' class remotefilelog(object):'
424 return self.repo.nullid
422 return self.repo.nullid
425
423
426 revmap, parentfunc = self._buildrevgraph(a, b)
424 revmap, parentfunc = self._buildrevgraph(a, b)
427 nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}
425 nodemap = {v: k for (k, v) in revmap.items()}
428
426
429 ancs = ancestor.ancestors(parentfunc, revmap[a], revmap[b])
427 ancs = ancestor.ancestors(parentfunc, revmap[a], revmap[b])
430 if ancs:
428 if ancs:
@@ -439,7 +437,7 b' class remotefilelog(object):'
439 return self.repo.nullid
437 return self.repo.nullid
440
438
441 revmap, parentfunc = self._buildrevgraph(a, b)
439 revmap, parentfunc = self._buildrevgraph(a, b)
442 nodemap = {v: k for (k, v) in pycompat.iteritems(revmap)}
440 nodemap = {v: k for (k, v) in revmap.items()}
443
441
444 ancs = ancestor.commonancestorsheads(parentfunc, revmap[a], revmap[b])
442 ancs = ancestor.commonancestorsheads(parentfunc, revmap[a], revmap[b])
445 return map(nodemap.__getitem__, ancs)
443 return map(nodemap.__getitem__, ancs)
@@ -455,7 +453,7 b' class remotefilelog(object):'
455 parentsmap = collections.defaultdict(list)
453 parentsmap = collections.defaultdict(list)
456 allparents = set()
454 allparents = set()
457 for mapping in (amap, bmap):
455 for mapping in (amap, bmap):
458 for node, pdata in pycompat.iteritems(mapping):
456 for node, pdata in mapping.items():
459 parents = parentsmap[node]
457 parents = parentsmap[node]
460 p1, p2, linknode, copyfrom = pdata
458 p1, p2, linknode, copyfrom = pdata
461 # Don't follow renames (copyfrom).
459 # Don't follow renames (copyfrom).
@@ -4,9 +4,7 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import errno
10 import os
8 import os
11 import stat
9 import stat
12 import time
10 import time
@@ -22,7 +20,6 b' from mercurial import ('
22 error,
20 error,
23 extensions,
21 extensions,
24 match,
22 match,
25 pycompat,
26 scmutil,
23 scmutil,
27 store,
24 store,
28 streamclone,
25 streamclone,
@@ -95,7 +92,7 b' def onetimesetup(ui):'
95 b'x_rfl_getfile', b'file node', permission=b'pull'
92 b'x_rfl_getfile', b'file node', permission=b'pull'
96 )(getfile)
93 )(getfile)
97
94
98 class streamstate(object):
95 class streamstate:
99 match = None
96 match = None
100 shallowremote = False
97 shallowremote = False
101 noflatmf = False
98 noflatmf = False
@@ -257,9 +254,8 b' def _loadfileblob(repo, cachepath, path,'
257 if not os.path.exists(dirname):
254 if not os.path.exists(dirname):
258 try:
255 try:
259 os.makedirs(dirname)
256 os.makedirs(dirname)
260 except OSError as ex:
257 except FileExistsError:
261 if ex.errno != errno.EEXIST:
258 pass
262 raise
263
259
264 f = None
260 f = None
265 try:
261 try:
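The makedirs error handling above collapses because Python 3 gives OSError dedicated subclasses; catching FileExistsError expresses exactly the old errno.EEXIST test and still tolerates a concurrent creator. Sketch, with an illustrative path:

    import os
    import tempfile

    dirname = os.path.join(tempfile.gettempdir(), 'remotefilelog-demo-cache')

    try:
        os.makedirs(dirname)
    except FileExistsError:
        pass            # another writer beat us to it, same outcome as errno.EEXIST

    assert os.path.isdir(dirname)

os.makedirs(dirname, exist_ok=True) would say the same thing in one call; the changeset keeps the try/except shape so the surrounding code stays untouched.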
@@ -417,7 +413,7 b' def gcserver(ui, repo):'
417 cachepath = repo.vfs.join(b"remotefilelogcache")
413 cachepath = repo.vfs.join(b"remotefilelogcache")
418 for head in heads:
414 for head in heads:
419 mf = repo[head].manifest()
415 mf = repo[head].manifest()
420 for filename, filenode in pycompat.iteritems(mf):
416 for filename, filenode in mf.items():
421 filecachepath = os.path.join(cachepath, filename, hex(filenode))
417 filecachepath = os.path.join(cachepath, filename, hex(filenode))
422 neededfiles.add(filecachepath)
418 neededfiles.add(filecachepath)
423
419
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import os
1 import os
4 import time
2 import time
5
3
@@ -11,7 +9,6 b' from mercurial import ('
11 lock as lockmod,
9 lock as lockmod,
12 mdiff,
10 mdiff,
13 policy,
11 policy,
14 pycompat,
15 scmutil,
12 scmutil,
16 util,
13 util,
17 vfs,
14 vfs,
@@ -349,7 +346,7 b' def _computeincrementalpack(files, opts)'
349
346
350 # Group the packs by generation (i.e. by size)
347 # Group the packs by generation (i.e. by size)
351 generations = []
348 generations = []
352 for i in pycompat.xrange(len(limits)):
349 for i in range(len(limits)):
353 generations.append([])
350 generations.append([])
354
351
355 sizes = {}
352 sizes = {}
@@ -489,18 +486,18 b' def keepset(repo, keyfn, lastkeepkeys=No'
489 if type(m) is dict:
486 if type(m) is dict:
490 # m is a result of diff of two manifests and is a dictionary that
487 # m is a result of diff of two manifests and is a dictionary that
491 # maps filename to ((newnode, newflag), (oldnode, oldflag)) tuple
488 # maps filename to ((newnode, newflag), (oldnode, oldflag)) tuple
492 for filename, diff in pycompat.iteritems(m):
489 for filename, diff in m.items():
493 if diff[0][0] is not None:
490 if diff[0][0] is not None:
494 keepkeys.add(keyfn(filename, diff[0][0]))
491 keepkeys.add(keyfn(filename, diff[0][0]))
495 else:
492 else:
496 # m is a manifest object
493 # m is a manifest object
497 for filename, filenode in pycompat.iteritems(m):
494 for filename, filenode in m.items():
498 keepkeys.add(keyfn(filename, filenode))
495 keepkeys.add(keyfn(filename, filenode))
499
496
500 return keepkeys
497 return keepkeys
501
498
502
499
503 class repacker(object):
500 class repacker:
504 """Class for orchestrating the repack of data and history information into a
501 """Class for orchestrating the repack of data and history information into a
505 new format.
502 new format.
506 """
503 """
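For the keepset() hunk above, the comments describe two possible shapes for m: a manifest diff maps filename to ((newnode, newflag), (oldnode, oldflag)), while a plain manifest maps filename to filenode. A hedged sketch of the diff-dict branch, with a stand-in key function and made-up nodes:

    def keyfn(name, node):                 # stand-in for the real key function
        return (name, node)

    manifest_diff = {
        b'a.txt': ((b'\x11' * 20, b''), (b'\x22' * 20, b'')),
        b'b.txt': ((None, b''), (b'\x33' * 20, b'')),    # file removed in the new manifest
    }

    keepkeys = set()
    for filename, diff in manifest_diff.items():         # formerly pycompat.iteritems(m)
        newnode = diff[0][0]
        if newnode is not None:                          # deletions contribute nothing
            keepkeys.add(keyfn(filename, newnode))

    assert keepkeys == {(b'a.txt', b'\x11' * 20)}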
@@ -596,7 +593,7 b' class repacker(object):'
596 maxchainlen = ui.configint(b'packs', b'maxchainlen', 1000)
593 maxchainlen = ui.configint(b'packs', b'maxchainlen', 1000)
597
594
598 byfile = {}
595 byfile = {}
599 for entry in pycompat.itervalues(ledger.entries):
596 for entry in ledger.entries.values():
600 if entry.datasource:
597 if entry.datasource:
601 byfile.setdefault(entry.filename, {})[entry.node] = entry
598 byfile.setdefault(entry.filename, {})[entry.node] = entry
602
599
@@ -604,7 +601,7 b' class repacker(object):'
604 repackprogress = ui.makeprogress(
601 repackprogress = ui.makeprogress(
605 _(b"repacking data"), unit=self.unit, total=len(byfile)
602 _(b"repacking data"), unit=self.unit, total=len(byfile)
606 )
603 )
607 for filename, entries in sorted(pycompat.iteritems(byfile)):
604 for filename, entries in sorted(byfile.items()):
608 repackprogress.update(count)
605 repackprogress.update(count)
609
606
610 ancestors = {}
607 ancestors = {}
@@ -751,14 +748,14 b' class repacker(object):'
751 ui = self.repo.ui
748 ui = self.repo.ui
752
749
753 byfile = {}
750 byfile = {}
754 for entry in pycompat.itervalues(ledger.entries):
751 for entry in ledger.entries.values():
755 if entry.historysource:
752 if entry.historysource:
756 byfile.setdefault(entry.filename, {})[entry.node] = entry
753 byfile.setdefault(entry.filename, {})[entry.node] = entry
757
754
758 progress = ui.makeprogress(
755 progress = ui.makeprogress(
759 _(b"repacking history"), unit=self.unit, total=len(byfile)
756 _(b"repacking history"), unit=self.unit, total=len(byfile)
760 )
757 )
761 for filename, entries in sorted(pycompat.iteritems(byfile)):
758 for filename, entries in sorted(byfile.items()):
762 ancestors = {}
759 ancestors = {}
763 nodes = list(node for node in entries)
760 nodes = list(node for node in entries)
764
761
@@ -821,7 +818,7 b' class repacker(object):'
821 return sortednodes
818 return sortednodes
822
819
823
820
824 class repackledger(object):
821 class repackledger:
825 """Storage for all the bookkeeping that happens during a repack. It contains
822 """Storage for all the bookkeeping that happens during a repack. It contains
826 the list of revisions being repacked, what happened to each revision, and
823 the list of revisions being repacked, what happened to each revision, and
827 which source store contained which revision originally (for later cleanup).
824 which source store contained which revision originally (for later cleanup).
@@ -869,7 +866,7 b' class repackledger(object):'
869 self.created.add(value)
866 self.created.add(value)
870
867
871
868
872 class repackentry(object):
869 class repackentry:
873 """Simple class representing a single revision entry in the repackledger."""
870 """Simple class representing a single revision entry in the repackledger."""
874
871
875 __slots__ = (
872 __slots__ = (
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 from mercurial.i18n import _
8 from mercurial.i18n import _
10 from mercurial.node import bin, hex
9 from mercurial.node import bin, hex
@@ -14,7 +13,6 b' from mercurial import ('
14 error,
13 error,
15 match,
14 match,
16 mdiff,
15 mdiff,
17 pycompat,
18 )
16 )
19 from . import (
17 from . import (
20 constants,
18 constants,
@@ -44,7 +42,7 b' def shallowgroup(cls, self, nodelist, rl'
44 nodelist.insert(0, p)
42 nodelist.insert(0, p)
45
43
46 # build deltas
44 # build deltas
47 for i in pycompat.xrange(len(nodelist) - 1):
45 for i in range(len(nodelist) - 1):
48 prev, curr = nodelist[i], nodelist[i + 1]
46 prev, curr = nodelist[i], nodelist[i + 1]
49 linknode = lookup(curr)
47 linknode = lookup(curr)
50 for c in self.nodechunk(rlog, curr, prev, linknode):
48 for c in self.nodechunk(rlog, curr, prev, linknode):
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import os
8 import os
10
9
@@ -15,7 +14,6 b' from mercurial import ('
15 error,
14 error,
16 localrepo,
15 localrepo,
17 match,
16 match,
18 pycompat,
19 scmutil,
17 scmutil,
20 sparse,
18 sparse,
21 util,
19 util,
@@ -269,7 +267,7 b' def wraprepo(repo):'
269 mfrevlog = mfl.getstorage(b'')
267 mfrevlog = mfl.getstorage(b'')
270 if base is not None:
268 if base is not None:
271 mfdict = mfl[repo[base].manifestnode()].read()
269 mfdict = mfl[repo[base].manifestnode()].read()
272 skip = set(pycompat.iteritems(mfdict))
270 skip = set(mfdict.items())
273 else:
271 else:
274 skip = set()
272 skip = set()
275
273
@@ -299,7 +297,7 b' def wraprepo(repo):'
299 else:
297 else:
300 mfdict = mfl[mfnode].read()
298 mfdict = mfl[mfnode].read()
301
299
302 diff = pycompat.iteritems(mfdict)
300 diff = mfdict.items()
303 if pats:
301 if pats:
304 diff = (pf for pf in diff if m(pf[0]))
302 diff = (pf for pf in diff if m(pf[0]))
305 if sparsematch:
303 if sparsematch:
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9
8
10 def wrapstore(store):
9 def wrapstore(store):
@@ -4,10 +4,8 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 import collections
8 import collections
10 import errno
11 import os
9 import os
12 import stat
10 import stat
13 import struct
11 import struct
@@ -103,7 +101,7 b' def sumdicts(*dicts):'
103 """
101 """
104 result = collections.defaultdict(lambda: 0)
102 result = collections.defaultdict(lambda: 0)
105 for dict in dicts:
103 for dict in dicts:
106 for k, v in pycompat.iteritems(dict):
104 for k, v in dict.items():
107 result[k] += v
105 result[k] += v
108 return result
106 return result
109
107
@@ -111,7 +109,7 b' def sumdicts(*dicts):'
111 def prefixkeys(dict, prefix):
109 def prefixkeys(dict, prefix):
112 """Returns ``dict`` with ``prefix`` prepended to all its keys."""
110 """Returns ``dict`` with ``prefix`` prepended to all its keys."""
113 result = {}
111 result = {}
114 for k, v in pycompat.iteritems(dict):
112 for k, v in dict.items():
115 result[prefix + k] = v
113 result[prefix + k] = v
116 return result
114 return result
117
115
@@ -160,7 +158,7 b' def _buildpackmeta(metadict):'
160 length limit is exceeded
158 length limit is exceeded
161 """
159 """
162 metabuf = b''
160 metabuf = b''
163 for k, v in sorted(pycompat.iteritems((metadict or {}))):
161 for k, v in sorted((metadict or {}).items()):
164 if len(k) != 1:
162 if len(k) != 1:
165 raise error.ProgrammingError(b'packmeta: illegal key: %s' % k)
163 raise error.ProgrammingError(b'packmeta: illegal key: %s' % k)
166 if len(v) > 0xFFFE:
164 if len(v) > 0xFFFE:
@@ -176,8 +174,8 b' def _buildpackmeta(metadict):'
176
174
177
175
178 _metaitemtypes = {
176 _metaitemtypes = {
179 constants.METAKEYFLAG: (int, pycompat.long),
177 constants.METAKEYFLAG: (int, int),
180 constants.METAKEYSIZE: (int, pycompat.long),
178 constants.METAKEYSIZE: (int, int),
181 }
179 }
182
180
183
181
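pycompat.long was the Python 2 long type; Python 3 has a single arbitrary-precision int, so the isinstance() guards above lose their second leg. The mechanical substitution leaves a redundant (int, int) tuple, which isinstance() accepts and treats the same as plain int:

    value = 0xFFFE                                   # fits the size field checked above

    # A duplicate entry in the type tuple changes nothing.
    assert isinstance(value, (int, int))
    assert isinstance(value, (int, int)) == isinstance(value, int)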
@@ -188,7 +186,7 b' def buildpackmeta(metadict):'
188 and METAKEYFLAG will be dropped if its value is 0.
186 and METAKEYFLAG will be dropped if its value is 0.
189 """
187 """
190 newmeta = {}
188 newmeta = {}
191 for k, v in pycompat.iteritems(metadict or {}):
189 for k, v in (metadict or {}).items():
192 expectedtype = _metaitemtypes.get(k, (bytes,))
190 expectedtype = _metaitemtypes.get(k, (bytes,))
193 if not isinstance(v, expectedtype):
191 if not isinstance(v, expectedtype):
194 raise error.ProgrammingError(b'packmeta: wrong type of key %s' % k)
192 raise error.ProgrammingError(b'packmeta: wrong type of key %s' % k)
@@ -209,7 +207,7 b' def parsepackmeta(metabuf):'
209 integers.
207 integers.
210 """
208 """
211 metadict = _parsepackmeta(metabuf)
209 metadict = _parsepackmeta(metabuf)
212 for k, v in pycompat.iteritems(metadict):
210 for k, v in metadict.items():
213 if k in _metaitemtypes and int in _metaitemtypes[k]:
211 if k in _metaitemtypes and int in _metaitemtypes[k]:
214 metadict[k] = bin2int(v)
212 metadict[k] = bin2int(v)
215 return metadict
213 return metadict
@@ -360,9 +358,8 b' def writefile(path, content, readonly=Fa'
360 if not os.path.exists(dirname):
358 if not os.path.exists(dirname):
361 try:
359 try:
362 os.makedirs(dirname)
360 os.makedirs(dirname)
363 except OSError as ex:
361 except FileExistsError:
364 if ex.errno != errno.EEXIST:
362 pass
365 raise
366
363
367 fd, temp = tempfile.mkstemp(prefix=b'.%s-' % filename, dir=dirname)
364 fd, temp = tempfile.mkstemp(prefix=b'.%s-' % filename, dir=dirname)
368 os.close(fd)
365 os.close(fd)
@@ -455,14 +452,14 b' def readpath(stream):'
455 def readnodelist(stream):
452 def readnodelist(stream):
456 rawlen = readexactly(stream, constants.NODECOUNTSIZE)
453 rawlen = readexactly(stream, constants.NODECOUNTSIZE)
457 nodecount = struct.unpack(constants.NODECOUNTSTRUCT, rawlen)[0]
454 nodecount = struct.unpack(constants.NODECOUNTSTRUCT, rawlen)[0]
458 for i in pycompat.xrange(nodecount):
455 for i in range(nodecount):
459 yield readexactly(stream, constants.NODESIZE)
456 yield readexactly(stream, constants.NODESIZE)
460
457
461
458
462 def readpathlist(stream):
459 def readpathlist(stream):
463 rawlen = readexactly(stream, constants.PATHCOUNTSIZE)
460 rawlen = readexactly(stream, constants.PATHCOUNTSIZE)
464 pathcount = struct.unpack(constants.PATHCOUNTSTRUCT, rawlen)[0]
461 pathcount = struct.unpack(constants.PATHCOUNTSTRUCT, rawlen)[0]
465 for i in pycompat.xrange(pathcount):
462 for i in range(pathcount):
466 yield readpath(stream)
463 yield readpath(stream)
467
464
468
465
@@ -520,9 +517,8 b' def mkstickygroupdir(ui, path):'
520 for path in reversed(missingdirs):
517 for path in reversed(missingdirs):
521 try:
518 try:
522 os.mkdir(path)
519 os.mkdir(path)
523 except OSError as ex:
520 except FileExistsError:
524 if ex.errno != errno.EEXIST:
521 pass
525 raise
526
522
527 for path in missingdirs:
523 for path in missingdirs:
528 setstickygroupdir(path, gid, ui.warn)
524 setstickygroupdir(path, gid, ui.warn)
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 from mercurial.i18n import _
8 from mercurial.i18n import _
10 from mercurial import verify
9 from mercurial import verify
@@ -24,7 +24,8 b' remotenames.hoistedpeer'
24 namespace (default: 'default')
24 namespace (default: 'default')
25 """
25 """
26
26
27 from __future__ import absolute_import
27
28 import collections.abc
28
29
29 from mercurial.i18n import _
30 from mercurial.i18n import _
30
31
@@ -35,7 +36,6 b' from mercurial import ('
35 extensions,
36 extensions,
36 logexchange,
37 logexchange,
37 namespaces,
38 namespaces,
38 pycompat,
39 registrar,
39 registrar,
40 revsetlang,
40 revsetlang,
41 smartset,
41 smartset,
@@ -45,15 +45,6 b' from mercurial import ('
45
45
46 from mercurial.utils import stringutil
46 from mercurial.utils import stringutil
47
47
48 if pycompat.ispy3:
49 import collections.abc
50
51 mutablemapping = collections.abc.MutableMapping
52 else:
53 import collections
54
55 mutablemapping = collections.MutableMapping
56
57 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
48 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
58 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
49 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
59 # be specifying the version(s) of Mercurial they are tested with, or
50 # be specifying the version(s) of Mercurial they are tested with, or
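The conditional import removed above picked MutableMapping from collections on Python 2 and from collections.abc on Python 3; the aliases in the bare collections module were removed in Python 3.10, so subclassing collections.abc.MutableMapping directly is the only portable spelling left. A minimal, hypothetical mapping (not the real lazyremotenamedict):

    import collections.abc

    class LazyDict(collections.abc.MutableMapping):
        def __init__(self):
            self._data = {}
        def __getitem__(self, key):
            return self._data[key]
        def __setitem__(self, key, value):
            self._data[key] = value
        def __delitem__(self, key):
            del self._data[key]
        def __iter__(self):
            return iter(self._data)
        def __len__(self):
            return len(self._data)

    d = LazyDict()
    d[b'default/foo'] = b'\x00' * 20
    assert list(d.items()) == [(b'default/foo', b'\x00' * 20)]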
@@ -82,7 +73,7 b' configitem('
82 )
73 )
83
74
84
75
85 class lazyremotenamedict(mutablemapping):
76 class lazyremotenamedict(collections.abc.MutableMapping):
86 """
77 """
87 Read-only dict-like Class to lazily resolve remotename entries
78 Read-only dict-like Class to lazily resolve remotename entries
88
79
@@ -171,13 +162,13 b' class lazyremotenamedict(mutablemapping)'
171 if not self.loaded:
162 if not self.loaded:
172 self._load()
163 self._load()
173
164
174 for k, vtup in pycompat.iteritems(self.potentialentries):
165 for k, vtup in self.potentialentries.items():
175 yield (k, [bin(vtup[0])])
166 yield (k, [bin(vtup[0])])
176
167
177 items = iteritems
168 items = iteritems
178
169
179
170
180 class remotenames(object):
171 class remotenames:
181 """
172 """
182 This class encapsulates all the remotenames state. It also contains
173 This class encapsulates all the remotenames state. It also contains
183 methods to access that state in convenient ways. Remotenames are lazy
174 methods to access that state in convenient ways. Remotenames are lazy
@@ -208,7 +199,7 b' class remotenames(object):'
208 if not self._nodetobmarks:
199 if not self._nodetobmarks:
209 bmarktonodes = self.bmarktonodes()
200 bmarktonodes = self.bmarktonodes()
210 self._nodetobmarks = {}
201 self._nodetobmarks = {}
211 for name, node in pycompat.iteritems(bmarktonodes):
202 for name, node in bmarktonodes.items():
212 self._nodetobmarks.setdefault(node[0], []).append(name)
203 self._nodetobmarks.setdefault(node[0], []).append(name)
213 return self._nodetobmarks
204 return self._nodetobmarks
214
205
@@ -219,7 +210,7 b' class remotenames(object):'
219 if not self._nodetobranch:
210 if not self._nodetobranch:
220 branchtonodes = self.branchtonodes()
211 branchtonodes = self.branchtonodes()
221 self._nodetobranch = {}
212 self._nodetobranch = {}
222 for name, nodes in pycompat.iteritems(branchtonodes):
213 for name, nodes in branchtonodes.items():
223 for node in nodes:
214 for node in nodes:
224 self._nodetobranch.setdefault(node, []).append(name)
215 self._nodetobranch.setdefault(node, []).append(name)
225 return self._nodetobranch
216 return self._nodetobranch
@@ -229,7 +220,7 b' class remotenames(object):'
229 marktonodes = self.bmarktonodes()
220 marktonodes = self.bmarktonodes()
230 self._hoisttonodes = {}
221 self._hoisttonodes = {}
231 hoist += b'/'
222 hoist += b'/'
232 for name, node in pycompat.iteritems(marktonodes):
223 for name, node in marktonodes.items():
233 if name.startswith(hoist):
224 if name.startswith(hoist):
234 name = name[len(hoist) :]
225 name = name[len(hoist) :]
235 self._hoisttonodes[name] = node
226 self._hoisttonodes[name] = node
@@ -240,7 +231,7 b' class remotenames(object):'
240 marktonodes = self.bmarktonodes()
231 marktonodes = self.bmarktonodes()
241 self._nodetohoists = {}
232 self._nodetohoists = {}
242 hoist += b'/'
233 hoist += b'/'
243 for name, node in pycompat.iteritems(marktonodes):
234 for name, node in marktonodes.items():
244 if name.startswith(hoist):
235 if name.startswith(hoist):
245 name = name[len(hoist) :]
236 name = name[len(hoist) :]
246 self._nodetohoists.setdefault(node[0], []).append(name)
237 self._nodetohoists.setdefault(node[0], []).append(name)
@@ -39,7 +39,6 b' For convenience, the extension adds thes'
39 You can override a predefined scheme by defining a new scheme with the
39 You can override a predefined scheme by defining a new scheme with the
40 same name.
40 same name.
41 """
41 """
42 from __future__ import absolute_import
43
42
44 import os
43 import os
45 import re
44 import re
@@ -68,7 +67,7 b" testedwith = b'ships-with-hg-core'"
68 _partre = re.compile(br'{(\d+)\}')
67 _partre = re.compile(br'{(\d+)\}')
69
68
70
69
71 class ShortRepository(object):
70 class ShortRepository:
72 def __init__(self, url, scheme, templater):
71 def __init__(self, url, scheme, templater):
73 self.scheme = scheme
72 self.scheme = scheme
74 self.templater = templater
73 self.templater = templater
@@ -65,9 +65,7 b' The following ``share.`` config options '
65 and there are no untracked files, delete that share and create a new share.
65 and there are no untracked files, delete that share and create a new share.
66 '''
66 '''
67
67
68 from __future__ import absolute_import
69
68
70 import errno
71 from mercurial.i18n import _
69 from mercurial.i18n import _
72 from mercurial import (
70 from mercurial import (
73 bookmarks,
71 bookmarks,
@@ -178,9 +176,7 b' def _hassharedbookmarks(repo):'
178 return False
176 return False
179 try:
177 try:
180 shared = repo.vfs.read(b'shared').splitlines()
178 shared = repo.vfs.read(b'shared').splitlines()
181 except IOError as inst:
179 except FileNotFoundError:
182 if inst.errno != errno.ENOENT:
183 raise
184 return False
180 return False
185 return hg.sharedbookmarks in shared
181 return hg.sharedbookmarks in shared
186
182
@@ -200,9 +196,8 b' def getbkfile(orig, repo):'
200 # is up-to-date.
196 # is up-to-date.
201 return fp
197 return fp
202 fp.close()
198 fp.close()
203 except IOError as inst:
199 except FileNotFoundError:
204 if inst.errno != errno.ENOENT:
200 pass
205 raise
206
201
207 # otherwise, we should read bookmarks from srcrepo,
202 # otherwise, we should read bookmarks from srcrepo,
208 # because .hg/bookmarks in srcrepo might be already
203 # because .hg/bookmarks in srcrepo might be already
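Both hunks above replace the IOError-plus-errno.ENOENT pattern: on Python 3 IOError is an alias of OSError and a missing file raises FileNotFoundError, so the errno comparison becomes a plain except clause. Sketch, with an illustrative path that is assumed not to exist:

    import errno

    path = '/nonexistent/demo/bookmarks'

    try:
        with open(path, 'rb') as fp:
            data = fp.read()
    except FileNotFoundError as exc:
        assert exc.errno == errno.ENOENT   # the same condition the old code tested
        data = b''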
@@ -25,7 +25,6 b' The following config options can influen'
25 performed.
25 performed.
26 """
26 """
27
27
28 from __future__ import absolute_import
29
28
30 from mercurial.i18n import _
29 from mercurial.i18n import _
31 from mercurial.node import nullrev
30 from mercurial.node import nullrev
@@ -71,18 +71,15 b' certain files::'
71 tools/tests/**
71 tools/tests/**
72 """
72 """
73
73
74 from __future__ import absolute_import
75
74
76 from mercurial.i18n import _
75 from mercurial.i18n import _
77 from mercurial.pycompat import setattr
76 from mercurial.pycompat import setattr
78 from mercurial import (
77 from mercurial import (
79 cmdutil,
78 cmdutil,
80 commands,
79 commands,
81 dirstate,
82 error,
80 error,
83 extensions,
81 extensions,
84 logcmdutil,
82 logcmdutil,
85 match as matchmod,
86 merge as mergemod,
83 merge as mergemod,
87 pycompat,
84 pycompat,
88 registrar,
85 registrar,
@@ -106,7 +103,6 b' def extsetup(ui):'
106 _setupclone(ui)
103 _setupclone(ui)
107 _setuplog(ui)
104 _setuplog(ui)
108 _setupadd(ui)
105 _setupadd(ui)
109 _setupdirstate(ui)
110
106
111
107
112 def replacefilecache(cls, propname, replacement):
108 def replacefilecache(cls, propname, replacement):
@@ -209,69 +205,6 b' def _setupadd(ui):'
209 extensions.wrapcommand(commands.table, b'add', _add)
205 extensions.wrapcommand(commands.table, b'add', _add)
210
206
211
207
212 def _setupdirstate(ui):
213 """Modify the dirstate to prevent stat'ing excluded files,
214 and to prevent modifications to files outside the checkout.
215 """
216
217 def walk(orig, self, match, subrepos, unknown, ignored, full=True):
218 # hack to not exclude explicitly-specified paths so that they can
219 # be warned later on e.g. dirstate.add()
220 em = matchmod.exact(match.files())
221 sm = matchmod.unionmatcher([self._sparsematcher, em])
222 match = matchmod.intersectmatchers(match, sm)
223 return orig(self, match, subrepos, unknown, ignored, full)
224
225 extensions.wrapfunction(dirstate.dirstate, b'walk', walk)
226
227 # dirstate.rebuild should not add non-matching files
228 def _rebuild(orig, self, parent, allfiles, changedfiles=None):
229 matcher = self._sparsematcher
230 if not matcher.always():
231 allfiles = [f for f in allfiles if matcher(f)]
232 if changedfiles:
233 changedfiles = [f for f in changedfiles if matcher(f)]
234
235 if changedfiles is not None:
236 # In _rebuild, these files will be deleted from the dirstate
237 # when they are not found to be in allfiles
238 dirstatefilestoremove = {f for f in self if not matcher(f)}
239 changedfiles = dirstatefilestoremove.union(changedfiles)
240
241 return orig(self, parent, allfiles, changedfiles)
242
243 extensions.wrapfunction(dirstate.dirstate, b'rebuild', _rebuild)
244
245 # Prevent adding files that are outside the sparse checkout
246 editfuncs = [
247 b'set_tracked',
248 b'set_untracked',
249 b'copy',
250 ]
251 hint = _(
252 b'include file with `hg debugsparse --include <pattern>` or use '
253 + b'`hg add -s <file>` to include file directory while adding'
254 )
255 for func in editfuncs:
256
257 def _wrapper(orig, self, *args, **kwargs):
258 sparsematch = self._sparsematcher
259 if not sparsematch.always():
260 for f in args:
261 if f is not None and not sparsematch(f) and f not in self:
262 raise error.Abort(
263 _(
264 b"cannot add '%s' - it is outside "
265 b"the sparse checkout"
266 )
267 % f,
268 hint=hint,
269 )
270 return orig(self, *args, **kwargs)
271
272 extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
273
274
275 @command(
208 @command(
276 b'debugsparse',
209 b'debugsparse',
277 [
210 [
@@ -398,6 +331,9 b' def debugsparse(ui, repo, **opts):'
398 if count > 1:
331 if count > 1:
399 raise error.Abort(_(b"too many flags specified"))
332 raise error.Abort(_(b"too many flags specified"))
400
333
334 # enable sparse on repo even if the sparse requirement is missing.
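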
335 repo._has_sparse = True
336
401 if count == 0:
337 if count == 0:
402 if repo.vfs.exists(b'sparse'):
338 if repo.vfs.exists(b'sparse'):
403 ui.status(repo.vfs.read(b"sparse") + b"\n")
339 ui.status(repo.vfs.read(b"sparse") + b"\n")
@@ -453,3 +389,5 b' def debugsparse(ui, repo, **opts):'
453 )
389 )
454 finally:
390 finally:
455 wlock.release()
391 wlock.release()
392
393 del repo._has_sparse
@@ -7,7 +7,6 b''
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 """command to split a changeset into smaller ones (EXPERIMENTAL)"""
8 """command to split a changeset into smaller ones (EXPERIMENTAL)"""
9
9
10 from __future__ import absolute_import
11
10
12 from mercurial.i18n import _
11 from mercurial.i18n import _
13
12
@@ -43,7 +43,6 b' option to ``sqlite`` to enable new repos'
43 # --extra-config-opt extensions.sqlitestore= \
43 # --extra-config-opt extensions.sqlitestore= \
44 # --extra-config-opt storage.new-repo-backend=sqlite
44 # --extra-config-opt storage.new-repo-backend=sqlite
45
45
46 from __future__ import absolute_import
47
46
48 import sqlite3
47 import sqlite3
49 import struct
48 import struct
@@ -265,7 +264,7 b' class SQLiteStoreError(error.StorageErro'
265
264
266
265
267 @attr.s
266 @attr.s
268 class revisionentry(object):
267 class revisionentry:
269 rid = attr.ib()
268 rid = attr.ib()
270 rev = attr.ib()
269 rev = attr.ib()
271 node = attr.ib()
270 node = attr.ib()
@@ -279,7 +278,7 b' class revisionentry(object):'
279
278
280 @interfaceutil.implementer(repository.irevisiondelta)
279 @interfaceutil.implementer(repository.irevisiondelta)
281 @attr.s(slots=True)
280 @attr.s(slots=True)
282 class sqliterevisiondelta(object):
281 class sqliterevisiondelta:
283 node = attr.ib()
282 node = attr.ib()
284 p1node = attr.ib()
283 p1node = attr.ib()
285 p2node = attr.ib()
284 p2node = attr.ib()
@@ -295,14 +294,14 b' class sqliterevisiondelta(object):'
295
294
296 @interfaceutil.implementer(repository.iverifyproblem)
295 @interfaceutil.implementer(repository.iverifyproblem)
297 @attr.s(frozen=True)
296 @attr.s(frozen=True)
298 class sqliteproblem(object):
297 class sqliteproblem:
299 warning = attr.ib(default=None)
298 warning = attr.ib(default=None)
300 error = attr.ib(default=None)
299 error = attr.ib(default=None)
301 node = attr.ib(default=None)
300 node = attr.ib(default=None)
302
301
303
302
304 @interfaceutil.implementer(repository.ifilestorage)
303 @interfaceutil.implementer(repository.ifilestorage)
305 class sqlitefilestore(object):
304 class sqlitefilestore:
306 """Implements storage for an individual tracked path."""
305 """Implements storage for an individual tracked path."""
307
306
308 def __init__(self, db, path, compression):
307 def __init__(self, db, path, compression):
@@ -397,7 +396,7 b' class sqlitefilestore(object):'
397 return len(self._revisions)
396 return len(self._revisions)
398
397
399 def __iter__(self):
398 def __iter__(self):
400 return iter(pycompat.xrange(len(self._revisions)))
399 return iter(range(len(self._revisions)))
401
400
402 def hasnode(self, node):
401 def hasnode(self, node):
403 if node == sha1nodeconstants.nullid:
402 if node == sha1nodeconstants.nullid:
@@ -1250,7 +1249,7 b' def newreporequirements(orig, ui, create'
1250
1249
1251
1250
1252 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
1251 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
1253 class sqlitefilestorage(object):
1252 class sqlitefilestorage:
1254 """Repository file storage backed by SQLite."""
1253 """Repository file storage backed by SQLite."""
1255
1254
1256 def file(self, path):
1255 def file(self, path):
@@ -6,7 +6,6 b' since version 5.7. Please use :hg:`debug'
6 This extension allows you to strip changesets and all their descendants from the
6 This extension allows you to strip changesets and all their descendants from the
7 repository. See the command help for details.
7 repository. See the command help for details.
8 """
8 """
9 from __future__ import absolute_import
10
9
11 from mercurial import commands
10 from mercurial import commands
12
11
@@ -13,7 +13,6 b' possibly in another repository. The tran'
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 Transplanted patches are recorded in .hg/transplant/transplants, as a
14 map from a changeset hash to its hash in the source repository.
14 map from a changeset hash to its hash in the source repository.
15 '''
15 '''
16 from __future__ import absolute_import
17
16
18 import os
17 import os
19
18
@@ -76,13 +75,13 b' configitem('
76 )
75 )
77
76
78
77
79 class transplantentry(object):
78 class transplantentry:
80 def __init__(self, lnode, rnode):
79 def __init__(self, lnode, rnode):
81 self.lnode = lnode
80 self.lnode = lnode
82 self.rnode = rnode
81 self.rnode = rnode
83
82
84
83
85 class transplants(object):
84 class transplants:
86 def __init__(self, path=None, transplantfile=None, opener=None):
85 def __init__(self, path=None, transplantfile=None, opener=None):
87 self.path = path
86 self.path = path
88 self.transplantfile = transplantfile
87 self.transplantfile = transplantfile
@@ -107,7 +106,7 b' class transplants(object):'
107 if not os.path.isdir(self.path):
106 if not os.path.isdir(self.path):
108 os.mkdir(self.path)
107 os.mkdir(self.path)
109 fp = self.opener(self.transplantfile, b'w')
108 fp = self.opener(self.transplantfile, b'w')
110 for list in pycompat.itervalues(self.transplants):
109 for list in self.transplants.values():
111 for t in list:
110 for t in list:
112 l, r = map(hex, (t.lnode, t.rnode))
111 l, r = map(hex, (t.lnode, t.rnode))
113 fp.write(l + b':' + r + b'\n')
112 fp.write(l + b':' + r + b'\n')
@@ -129,7 +128,7 b' class transplants(object):'
129 self.dirty = True
128 self.dirty = True
130
129
131
130
132 class transplanter(object):
131 class transplanter:
133 def __init__(self, ui, repo, opts):
132 def __init__(self, ui, repo, opts):
134 self.ui = ui
133 self.ui = ui
135 self.repo = repo
134 self.repo = repo
@@ -17,7 +17,6 b' removed in the changeset will be left un'
17 added and removed in the working directory.
17 added and removed in the working directory.
18 """
18 """
19
19
20 from __future__ import absolute_import
21
20
22 from mercurial.i18n import _
21 from mercurial.i18n import _
23
22
@@ -81,9 +80,7 b' def _commitfiltered('
81 files = initialfiles - exclude
80 files = initialfiles - exclude
82 # Filter copies
81 # Filter copies
83 copied = copiesmod.pathcopies(base, ctx)
82 copied = copiesmod.pathcopies(base, ctx)
84 copied = {
83 copied = {dst: src for dst, src in copied.items() if dst in files}
85 dst: src for dst, src in pycompat.iteritems(copied) if dst in files
86 }
87
84
88 def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
85 def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
89 if path not in contentctx:
86 if path not in contentctx:
@@ -44,7 +44,6 b' You can specify the encoding by config o'
44
44
45 It is useful for the users who want to commit with UTF-8 log message.
45 It is useful for the users who want to commit with UTF-8 log message.
46 '''
46 '''
47 from __future__ import absolute_import
48
47
49 import os
48 import os
50 import sys
49 import sys
@@ -95,7 +94,7 b' def decode(arg):'
95
94
96
95
97 def encode(arg):
96 def encode(arg):
98 if isinstance(arg, pycompat.unicode):
97 if isinstance(arg, str):
99 return arg.encode(_encoding)
98 return arg.encode(_encoding)
100 elif isinstance(arg, tuple):
99 elif isinstance(arg, tuple):
101 return tuple(map(encode, arg))
100 return tuple(map(encode, arg))
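pycompat.unicode named the text type (unicode on Python 2, str on Python 3); with Python 2 gone the check can name str directly. A self-contained, simplified sketch of the rewritten helper (only the str and tuple branches, encoding hard-coded for the demo):

    _encoding = 'utf-8'                    # the extension really reads this from config

    def encode(arg):
        if isinstance(arg, str):           # formerly isinstance(arg, pycompat.unicode)
            return arg.encode(_encoding)
        elif isinstance(arg, tuple):
            return tuple(map(encode, arg))
        return arg

    assert encode(('caf\u00e9', b'raw')) == (b'caf\xc3\xa9', b'raw')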
@@ -136,7 +135,7 b' def basewrapper(func, argtype, enc, dec,'
136
135
137
136
138 def wrapper(func, args, kwds):
137 def wrapper(func, args, kwds):
139 return basewrapper(func, pycompat.unicode, encode, decode, args, kwds)
138 return basewrapper(func, str, encode, decode, args, kwds)
140
139
141
140
142 def reversewrapper(func, args, kwds):
141 def reversewrapper(func, args, kwds):
@@ -41,7 +41,6 b' pushed or pulled::'
41 # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
41 # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
42 '''
42 '''
43
43
44 from __future__ import absolute_import
45
44
46 import re
45 import re
47 from mercurial.i18n import _
46 from mercurial.i18n import _
@@ -49,7 +48,6 b' from mercurial.node import short'
49 from mercurial import (
48 from mercurial import (
50 cmdutil,
49 cmdutil,
51 extensions,
50 extensions,
52 pycompat,
53 registrar,
51 registrar,
54 )
52 )
55 from mercurial.utils import stringutil
53 from mercurial.utils import stringutil
@@ -157,9 +155,7 b' def forbidnewline(ui, repo, hooktype, no'
157 # changegroup that contains an unacceptable commit followed later
155 # changegroup that contains an unacceptable commit followed later
158 # by a commit that fixes the problem.
156 # by a commit that fixes the problem.
159 tip = repo[b'tip']
157 tip = repo[b'tip']
160 for rev in pycompat.xrange(
158 for rev in range(repo.changelog.tiprev(), repo[node].rev() - 1, -1):
161 repo.changelog.tiprev(), repo[node].rev() - 1, -1
162 ):
163 c = repo[rev]
159 c = repo[rev]
164 for f in c.files():
160 for f in c.files():
165 if f in seen or f not in tip or f not in c:
161 if f in seen or f not in tip or f not in c:
@@ -213,7 +209,7 b' def forbidcr(ui, repo, hooktype, node, *'
213 def reposetup(ui, repo):
209 def reposetup(ui, repo):
214 if not repo.local():
210 if not repo.local():
215 return
211 return
216 for name, fn in pycompat.iteritems(_filters):
212 for name, fn in _filters.items():
217 repo.adddatafilter(name, fn)
213 repo.adddatafilter(name, fn)
218
214
219
215
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import, print_function
2
3 """ Multicast DNS Service Discovery for Python, v0.12
1 """ Multicast DNS Service Discovery for Python, v0.12
4 Copyright (C) 2003, Paul Scott-Murphy
2 Copyright (C) 2003, Paul Scott-Murphy
5
3
@@ -233,7 +231,7 b' class BadDomainNameCircular(BadDomainNam'
233 # implementation classes
231 # implementation classes
234
232
235
233
236 class DNSEntry(object):
234 class DNSEntry:
237 """A DNS entry"""
235 """A DNS entry"""
238
236
239 def __init__(self, name, type, clazz):
237 def __init__(self, name, type, clazz):
@@ -294,7 +292,7 b' class DNSQuestion(DNSEntry):'
294 """A DNS question entry"""
292 """A DNS question entry"""
295
293
296 def __init__(self, name, type, clazz):
294 def __init__(self, name, type, clazz):
297 if pycompat.ispy3 and isinstance(name, str):
295 if isinstance(name, str):
298 name = name.encode('ascii')
296 name = name.encode('ascii')
299 if not name.endswith(b".local."):
297 if not name.endswith(b".local."):
300 raise NonLocalNameException(name)
298 raise NonLocalNameException(name)
@@ -508,7 +506,7 b' class DNSService(DNSRecord):'
508 return self.toString(b"%s:%s" % (self.server, self.port))
506 return self.toString(b"%s:%s" % (self.server, self.port))
509
507
510
508
511 class DNSIncoming(object):
509 class DNSIncoming:
512 """Object representation of an incoming DNS packet"""
510 """Object representation of an incoming DNS packet"""
513
511
514 def __init__(self, data):
512 def __init__(self, data):
@@ -704,7 +702,7 b' class DNSIncoming(object):'
704 return result
702 return result
705
703
706
704
707 class DNSOutgoing(object):
705 class DNSOutgoing:
708 """Object representation of an outgoing packet"""
706 """Object representation of an outgoing packet"""
709
707
710 def __init__(self, flags, multicast=1):
708 def __init__(self, flags, multicast=1):
@@ -866,7 +864,7 b' class DNSOutgoing(object):'
866 return b''.join(self.data)
864 return b''.join(self.data)
867
865
868
866
869 class DNSCache(object):
867 class DNSCache:
870 """A cache of DNS entries"""
868 """A cache of DNS entries"""
871
869
872 def __init__(self):
870 def __init__(self):
@@ -984,7 +982,7 b' class Engine(threading.Thread):'
984 self.condition.release()
982 self.condition.release()
985
983
986
984
987 class Listener(object):
985 class Listener:
988 """A Listener is used by this module to listen on the multicast
986 """A Listener is used by this module to listen on the multicast
989 group to which DNS messages are sent, allowing the implementation
987 group to which DNS messages are sent, allowing the implementation
990 to cache information as it arrives.
988 to cache information as it arrives.
@@ -1129,7 +1127,7 b' class ServiceBrowser(threading.Thread):'
1129 event(self.zeroconf)
1127 event(self.zeroconf)
1130
1128
1131
1129
1132 class ServiceInfo(object):
1130 class ServiceInfo:
1133 """Service information"""
1131 """Service information"""
1134
1132
1135 def __init__(
1133 def __init__(
@@ -1388,7 +1386,7 b' class ServiceInfo(object):'
1388 return result
1386 return result
1389
1387
1390
1388
1391 class Zeroconf(object):
1389 class Zeroconf:
1392 """Implementation of Zeroconf Multicast DNS Service Discovery
1390 """Implementation of Zeroconf Multicast DNS Service Discovery
1393
1391
1394 Supports registration, unregistration, queries and browsing.
1392 Supports registration, unregistration, queries and browsing.
@@ -1461,7 +1459,7 b' class Zeroconf(object):'
1461 def notifyAll(self):
1459 def notifyAll(self):
1462 """Notifies all waiting threads"""
1460 """Notifies all waiting threads"""
1463 self.condition.acquire()
1461 self.condition.acquire()
1464 self.condition.notifyAll()
1462 self.condition.notify_all()
1465 self.condition.release()
1463 self.condition.release()
1466
1464
1467 def getServiceInfo(self, type, name, timeout=3000):
1465 def getServiceInfo(self, type, name, timeout=3000):
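threading.Condition grew snake_case method names long ago; notifyAll() survives only as a deprecated alias (it warns on Python 3.10+), so the bundled zeroconf code switches to notify_all(). Sketch:

    import threading

    cond = threading.Condition()

    with cond:                 # the condition's lock must be held while notifying
        cond.notify_all()      # canonical Python 3 spelling of notifyAll()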
@@ -22,7 +22,6 b' You can discover Zeroconf-enabled reposi'
22 $ hg paths
22 $ hg paths
23 zc-test = http://example.com:8000/test
23 zc-test = http://example.com:8000/test
24 '''
24 '''
25 from __future__ import absolute_import
26
25
27 import os
26 import os
28 import socket
27 import socket
@@ -159,7 +158,7 b' def zc_create_server(create_server, ui, '
159 # listen
158 # listen
160
159
161
160
162 class listener(object):
161 class listener:
163 def __init__(self):
162 def __init__(self):
164 self.found = {}
163 self.found = {}
165
164
@@ -1,5 +1,4 b''
1 # name space package to host third party extensions
1 # name space package to host third party extensions
2 from __future__ import absolute_import
3 import pkgutil
2 import pkgutil
4
3
5 __path__ = pkgutil.extend_path(__path__, __name__)
4 __path__ = pkgutil.extend_path(__path__, __name__)
@@ -1,7 +1,6 b''
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2 #
2 #
3 # check-translation.py - check Mercurial specific translation problems
3 # check-translation.py - check Mercurial specific translation problems
4 from __future__ import absolute_import
5
4
6 import re
5 import re
7
6
@@ -20,7 +20,6 b' Use xgettext like normal to extract stri'
20 join the message catalogs to get the final catalog.
20 join the message catalogs to get the final catalog.
21 """
21 """
22
22
23 from __future__ import absolute_import, print_function
24
23
25 import inspect
24 import inspect
26 import os
25 import os
@@ -13,7 +13,6 b' modify entries, comments or metadata, et'
13 :func:`~polib.mofile` convenience functions.
13 :func:`~polib.mofile` convenience functions.
14 """
14 """
15
15
16 from __future__ import absolute_import
17
16
18 __author__ = 'David Jean Louis <izimobil@gmail.com>'
17 __author__ = 'David Jean Louis <izimobil@gmail.com>'
19 __version__ = '1.0.7'
18 __version__ = '1.0.7'
@@ -43,7 +42,7 b' try:'
43 except ImportError:
42 except ImportError:
44 # replacement of io.open() for python < 2.6
43 # replacement of io.open() for python < 2.6
45 # we use codecs instead
44 # we use codecs instead
46 class io(object):
45 class io:
47 @staticmethod
46 @staticmethod
48 def open(fpath, mode='r', encoding=None):
47 def open(fpath, mode='r', encoding=None):
49 return codecs.open(fpath, mode, encoding)
48 return codecs.open(fpath, mode, encoding)
@@ -817,7 +816,7 b' class MOFile(_BaseFile):'
817 # class _BaseEntry {{{
816 # class _BaseEntry {{{
818
817
819
818
820 class _BaseEntry(object):
819 class _BaseEntry:
821 """
820 """
822 Base class for :class:`~polib.POEntry` and :class:`~polib.MOEntry` classes.
821 Base class for :class:`~polib.POEntry` and :class:`~polib.MOEntry` classes.
823 This class should **not** be instantiated directly.
822 This class should **not** be instantiated directly.
@@ -1228,7 +1227,7 b' class MOEntry(_BaseEntry):'
1228 # class _POFileParser {{{
1227 # class _POFileParser {{{
1229
1228
1230
1229
1231 class _POFileParser(object):
1230 class _POFileParser:
1232 """
1231 """
1233 A finite state machine to parse efficiently and correctly po
1232 A finite state machine to parse efficiently and correctly po
1234 file format.
1233 file format.
@@ -1707,7 +1706,7 b' class _POFileParser(object):'
1707 # class _MOFileParser {{{
1706 # class _MOFileParser {{{
1708
1707
1709
1708
1710 class _MOFileParser(object):
1709 class _MOFileParser:
1711 """
1710 """
1712 A class to parse binary mo files.
1711 A class to parse binary mo files.
1713 """
1712 """
@@ -5,7 +5,6 b''
5 # license: MIT/X11/Expat
5 # license: MIT/X11/Expat
6 #
6 #
7
7
8 from __future__ import absolute_import, print_function
9
8
10 import polib
9 import polib
11 import re
10 import re
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 # Allow 'from mercurial import demandimport' to keep working.
9 # Allow 'from mercurial import demandimport' to keep working.
11 import hgdemandimport
10 import hgdemandimport
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import heapq
9 import heapq
11
10
@@ -13,7 +12,6 b' from .node import nullrev'
13 from . import (
12 from . import (
14 dagop,
13 dagop,
15 policy,
14 policy,
16 pycompat,
17 )
15 )
18
16
19 parsers = policy.importmod('parsers')
17 parsers = policy.importmod('parsers')
@@ -147,7 +145,7 b' def ancestors(pfunc, *orignodes):'
147 return deepest(gca)
145 return deepest(gca)
148
146
149
147
150 class incrementalmissingancestors(object):
148 class incrementalmissingancestors:
151 """persistent state used to calculate missing ancestors incrementally
149 """persistent state used to calculate missing ancestors incrementally
152
150
153 Although similar in spirit to lazyancestors below, this is a separate class
151 Although similar in spirit to lazyancestors below, this is a separate class
@@ -188,7 +186,7 b' class incrementalmissingancestors(object'
188 # no revs to consider
186 # no revs to consider
189 return
187 return
190
188
191 for curr in pycompat.xrange(start, min(revs) - 1, -1):
189 for curr in range(start, min(revs) - 1, -1):
192 if curr not in bases:
190 if curr not in bases:
193 continue
191 continue
194 revs.discard(curr)
192 revs.discard(curr)
@@ -229,7 +227,7 b' class incrementalmissingancestors(object'
229 # exit.
227 # exit.
230
228
231 missing = []
229 missing = []
232 for curr in pycompat.xrange(start, nullrev, -1):
230 for curr in range(start, nullrev, -1):
233 if not revsvisit:
231 if not revsvisit:
234 break
232 break
235
233
@@ -317,7 +315,7 b' def _lazyancestorsiter(parentrevs, initr'
317 see(p2)
315 see(p2)
318
316
319
317
320 class lazyancestors(object):
318 class lazyancestors:
321 def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
319 def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
322 """Create a new object generating ancestors for the given revs. Does
320 """Create a new object generating ancestors for the given revs. Does
323 not generate revs lower than stoprev.
321 not generate revs lower than stoprev.
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import gzip
9 import gzip
11 import os
10 import os
@@ -76,7 +75,7 b' exts = {'
76
75
77
76
78 def guesskind(dest):
77 def guesskind(dest):
79 for kind, extensions in pycompat.iteritems(exts):
78 for kind, extensions in exts.items():
80 if any(dest.endswith(ext) for ext in extensions):
79 if any(dest.endswith(ext) for ext in extensions):
81 return kind
80 return kind
82 return None
81 return None
@@ -133,43 +132,10 b' def buildmetadata(ctx):'
133 return out.getvalue()
132 return out.getvalue()
134
133
135
134
136 class tarit(object):
135 class tarit:
137 """write archive to tar file or stream. can write uncompressed,
136 """write archive to tar file or stream. can write uncompressed,
138 or compress with gzip or bzip2."""
137 or compress with gzip or bzip2."""
139
138
140 if pycompat.ispy3:
141 GzipFileWithTime = gzip.GzipFile # camelcase-required
142 else:
143
144 class GzipFileWithTime(gzip.GzipFile):
145 def __init__(self, *args, **kw):
146 timestamp = None
147 if 'mtime' in kw:
148 timestamp = kw.pop('mtime')
149 if timestamp is None:
150 self.timestamp = time.time()
151 else:
152 self.timestamp = timestamp
153 gzip.GzipFile.__init__(self, *args, **kw)
154
155 def _write_gzip_header(self):
156 self.fileobj.write(b'\037\213') # magic header
157 self.fileobj.write(b'\010') # compression method
158 fname = self.name
159 if fname and fname.endswith(b'.gz'):
160 fname = fname[:-3]
161 flags = 0
162 if fname:
163 flags = gzip.FNAME # pytype: disable=module-attr
164 self.fileobj.write(pycompat.bytechr(flags))
165 gzip.write32u( # pytype: disable=module-attr
166 self.fileobj, int(self.timestamp)
167 )
168 self.fileobj.write(b'\002')
169 self.fileobj.write(b'\377')
170 if fname:
171 self.fileobj.write(fname + b'\000')
172
173 def __init__(self, dest, mtime, kind=b''):
139 def __init__(self, dest, mtime, kind=b''):
174 self.mtime = mtime
140 self.mtime = mtime
175 self.fileobj = None
141 self.fileobj = None
@@ -179,7 +145,7 b' class tarit(object):'
179 mode = mode[0:1]
145 mode = mode[0:1]
180 if not fileobj:
146 if not fileobj:
181 fileobj = open(name, mode + b'b')
147 fileobj = open(name, mode + b'b')
182 gzfileobj = self.GzipFileWithTime(
148 gzfileobj = gzip.GzipFile(
183 name,
149 name,
184 pycompat.sysstr(mode + b'b'),
150 pycompat.sysstr(mode + b'b'),
185 zlib.Z_BEST_COMPRESSION,
151 zlib.Z_BEST_COMPRESSION,
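The GzipFileWithTime subclass deleted in the previous hunk existed to control the timestamp written into the gzip header; gzip.GzipFile has accepted an mtime argument for a long time, so the plain class is now used directly. Sketch showing the timestamp landing in the header (offsets per the gzip format):

    import gzip
    import io

    buf = io.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb', mtime=0) as gz:   # fixed, reproducible timestamp
        gz.write(b'hello archive')

    header = buf.getvalue()
    assert header[:2] == b'\x1f\x8b'            # gzip magic
    assert header[4:8] == b'\x00\x00\x00\x00'   # the mtime field we pinned to zero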
@@ -227,7 +193,7 b' class tarit(object):'
227 self.fileobj.close()
193 self.fileobj.close()
228
194
229
195
230 class zipit(object):
196 class zipit:
231 """write archive to zip file or stream. can write uncompressed,
197 """write archive to zip file or stream. can write uncompressed,
232 or compressed with deflate."""
198 or compressed with deflate."""
233
199
@@ -274,7 +240,7 b' class zipit(object):'
274 self.z.close()
240 self.z.close()
275
241
276
242
277 class fileit(object):
243 class fileit:
278 '''write archive as files in directory.'''
244 '''write archive as files in directory.'''
279
245
280 def __init__(self, name, mtime):
246 def __init__(self, name, mtime):
@@ -339,9 +305,6 b' def archive('
339 subrepos tells whether to include subrepos.
305 subrepos tells whether to include subrepos.
340 """
306 """
341
307
342 if kind == b'txz' and not pycompat.ispy3:
343 raise error.Abort(_(b'xz compression is only available in Python 3'))
344
345 if kind == b'files':
308 if kind == b'files':
346 if prefix:
309 if prefix:
347 raise error.Abort(_(b'cannot give prefix when archiving to files'))
310 raise error.Abort(_(b'cannot give prefix when archiving to files'))
@@ -5,9 +5,7 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import errno
11 import struct
9 import struct
12
10
13 from .i18n import _
11 from .i18n import _
@@ -28,6 +26,7 b' from . import ('
28 util,
26 util,
29 )
27 )
30 from .utils import (
28 from .utils import (
29 stringutil,
31 urlutil,
30 urlutil,
32 )
31 )
33
32
@@ -59,7 +58,7 b' def _getbkfile(repo):'
59 return fp
58 return fp
60
59
61
60
62 class bmstore(object):
61 class bmstore:
63 r"""Storage for bookmarks.
62 r"""Storage for bookmarks.
64
63
65 This object should do all bookmark-related reads and writes, so
64 This object should do all bookmark-related reads and writes, so
@@ -101,8 +100,8 b' class bmstore(object):'
101 if nrefs[-2] > refspec:
100 if nrefs[-2] > refspec:
102 # bookmarks weren't sorted before 4.5
101 # bookmarks weren't sorted before 4.5
103 nrefs.sort()
102 nrefs.sort()
104 except (TypeError, ValueError):
103 except ValueError:
105 # TypeError:
104 # binascii.Error (ValueError subclass):
106 # - bin(...)
105 # - bin(...)
107 # ValueError:
106 # ValueError:
108 # - node in nm, for non-20-bytes entry
107 # - node in nm, for non-20-bytes entry
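The narrowed except clause works because, on Python 3, binascii.Error (what bin()/unhexlify raises on malformed hex) is a subclass of ValueError, so the TypeError arm that Python 2 needed can go. Sketch:

    import binascii

    assert issubclass(binascii.Error, ValueError)

    try:
        binascii.unhexlify(b'not hex at all!')    # the failure mode bin(...) hits
    except ValueError:
        pass                                      # binascii.Error caught here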
@@ -114,9 +113,8 b' class bmstore(object):'
114 _(b'malformed line in %s: %r\n')
113 _(b'malformed line in %s: %r\n')
115 % (bookmarkspath, pycompat.bytestr(line))
114 % (bookmarkspath, pycompat.bytestr(line))
116 )
115 )
117 except IOError as inst:
116 except FileNotFoundError:
118 if inst.errno != errno.ENOENT:
117 pass
119 raise
120 self._active = _readactive(repo, self)
118 self._active = _readactive(repo, self)
121
119
122 @property
120 @property
@@ -138,7 +136,7 b' class bmstore(object):'
138 return iter(self._refmap)
136 return iter(self._refmap)
139
137
140 def iteritems(self):
138 def iteritems(self):
141 return pycompat.iteritems(self._refmap)
139 return self._refmap.items()
142
140
143 def items(self):
141 def items(self):
144 return self._refmap.items()
142 return self._refmap.items()
@@ -251,7 +249,7 b' class bmstore(object):'
251 self._aclean = True
249 self._aclean = True
252
250
253 def _write(self, fp):
251 def _write(self, fp):
254 for name, node in sorted(pycompat.iteritems(self._refmap)):
252 for name, node in sorted(self._refmap.items()):
255 fp.write(b"%s %s\n" % (hex(node), encoding.fromlocal(name)))
253 fp.write(b"%s %s\n" % (hex(node), encoding.fromlocal(name)))
256 self._clean = True
254 self._clean = True
257 self._repo.invalidatevolatilesets()
255 self._repo.invalidatevolatilesets()
@@ -343,7 +341,7 b' def _readactive(repo, marks):'
343 # No readline() in osutil.posixfile, reading everything is
341 # No readline() in osutil.posixfile, reading everything is
344 # cheap.
342 # cheap.
345 content = repo.vfs.tryread(b'bookmarks.current')
343 content = repo.vfs.tryread(b'bookmarks.current')
346 mark = encoding.tolocal((content.splitlines() or [b''])[0])
344 mark = encoding.tolocal(stringutil.firstline(content))
347 if mark == b'' or mark not in marks:
345 if mark == b'' or mark not in marks:
348 mark = None
346 mark = None
349 return mark
347 return mark
@@ -419,7 +417,7 b' def headsforactive(repo):'
419 )
417 )
420 name = repo._activebookmark.split(b'@', 1)[0]
418 name = repo._activebookmark.split(b'@', 1)[0]
421 heads = []
419 heads = []
422 for mark, n in pycompat.iteritems(repo._bookmarks):
420 for mark, n in repo._bookmarks.items():
423 if mark.split(b'@', 1)[0] == name:
421 if mark.split(b'@', 1)[0] == name:
424 heads.append(n)
422 heads.append(n)
425 return heads
423 return heads
@@ -477,7 +475,7 b' def listbinbookmarks(repo):'
477 marks = getattr(repo, '_bookmarks', {})
475 marks = getattr(repo, '_bookmarks', {})
478
476
479 hasnode = repo.changelog.hasnode
477 hasnode = repo.changelog.hasnode
480 for k, v in pycompat.iteritems(marks):
478 for k, v in marks.items():
481 # don't expose local divergent bookmarks
479 # don't expose local divergent bookmarks
482 if hasnode(v) and not isdivergent(k):
480 if hasnode(v) and not isdivergent(k):
483 yield k, v
481 yield k, v
@@ -688,7 +686,7 b' def mirroring_remote(ui, repo, remotemar'
688 remotemarks"""
686 remotemarks"""
689 changed = []
687 changed = []
690 localmarks = repo._bookmarks
688 localmarks = repo._bookmarks
691 for (b, id) in pycompat.iteritems(remotemarks):
689 for (b, id) in remotemarks.items():
692 if id != localmarks.get(b, None) and id in repo:
690 if id != localmarks.get(b, None) and id in repo:
693 changed.append((b, id, ui.debug, _(b"updating bookmark %s\n") % b))
691 changed.append((b, id, ui.debug, _(b"updating bookmark %s\n") % b))
694 for b in localmarks:
692 for b in localmarks:
@@ -1075,7 +1073,7 b' def _printbookmarks(ui, repo, fm, bmarks'
1075 hexfn = fm.hexfunc
1073 hexfn = fm.hexfunc
1076 if len(bmarks) == 0 and fm.isplain():
1074 if len(bmarks) == 0 and fm.isplain():
1077 ui.status(_(b"no bookmarks set\n"))
1075 ui.status(_(b"no bookmarks set\n"))
1078 for bmark, (n, prefix, label) in sorted(pycompat.iteritems(bmarks)):
1076 for bmark, (n, prefix, label) in sorted(bmarks.items()):
1079 fm.startitem()
1077 fm.startitem()
1080 fm.context(repo=repo)
1078 fm.context(repo=repo)
1081 if not ui.quiet:
1079 if not ui.quiet:
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import struct
9 import struct
11
10
@@ -63,7 +62,7 b' pack_into = struct.pack_into'
63 unpack_from = struct.unpack_from
62 unpack_from = struct.unpack_from
64
63
65
64
66 class BranchMapCache(object):
65 class BranchMapCache:
67 """mapping of filtered views of repo with their branchcache"""
66 """mapping of filtered views of repo with their branchcache"""
68
67
69 def __init__(self):
68 def __init__(self):
@@ -120,7 +119,7 b' class BranchMapCache(object):'
120 clbranchinfo = cl.branchinfo
119 clbranchinfo = cl.branchinfo
121 rbheads = []
120 rbheads = []
122 closed = set()
121 closed = set()
123 for bheads in pycompat.itervalues(remotebranchmap):
122 for bheads in remotebranchmap.values():
124 rbheads += bheads
123 rbheads += bheads
125 for h in bheads:
124 for h in bheads:
126 r = clrev(h)
125 r = clrev(h)
@@ -160,7 +159,7 b' class BranchMapCache(object):'
160
159
161 def _unknownnode(node):
160 def _unknownnode(node):
162 """raises ValueError when branchcache found a node which does not exists"""
161 """raises ValueError when branchcache found a node which does not exists"""
163 raise ValueError('node %s does not exist' % pycompat.sysstr(hex(node)))
162 raise ValueError('node %s does not exist' % node.hex())
164
163
165
164
166 def _branchcachedesc(repo):
165 def _branchcachedesc(repo):
@@ -170,7 +169,7 b' def _branchcachedesc(repo):'
170 return b'branch cache'
169 return b'branch cache'
171
170
172
171
173 class branchcache(object):
172 class branchcache:
174 """A dict like object that hold branches heads cache.
173 """A dict like object that hold branches heads cache.
175
174
176 This cache is used to avoid costly computations to determine all the
175 This cache is used to avoid costly computations to determine all the
@@ -271,7 +270,7 b' class branchcache(object):'
271 return key in self._entries
270 return key in self._entries
272
271
273 def iteritems(self):
272 def iteritems(self):
274 for k, v in pycompat.iteritems(self._entries):
273 for k, v in self._entries.items():
275 self._verifybranch(k)
274 self._verifybranch(k)
276 yield k, v
275 yield k, v
277
276
@@ -401,13 +400,13 b' class branchcache(object):'
401 return heads
400 return heads
402
401
403 def iterbranches(self):
402 def iterbranches(self):
404 for bn, heads in pycompat.iteritems(self):
403 for bn, heads in self.items():
405 yield (bn, heads) + self._branchtip(heads)
404 yield (bn, heads) + self._branchtip(heads)
406
405
407 def iterheads(self):
406 def iterheads(self):
408 """returns all the heads"""
407 """returns all the heads"""
409 self._verifyall()
408 self._verifyall()
410 return pycompat.itervalues(self._entries)
409 return self._entries.values()
411
410
412 def copy(self):
411 def copy(self):
413 """return an deep copy of the branchcache object"""
412 """return an deep copy of the branchcache object"""
@@ -429,22 +428,22 b' class branchcache(object):'
429 self._delayed = True
428 self._delayed = True
430 return
429 return
431 try:
430 try:
432 f = repo.cachevfs(self._filename(repo), b"w", atomictemp=True)
431 filename = self._filename(repo)
433 cachekey = [hex(self.tipnode), b'%d' % self.tiprev]
432 with repo.cachevfs(filename, b"w", atomictemp=True) as f:
434 if self.filteredhash is not None:
433 cachekey = [hex(self.tipnode), b'%d' % self.tiprev]
435 cachekey.append(hex(self.filteredhash))
434 if self.filteredhash is not None:
436 f.write(b" ".join(cachekey) + b'\n')
435 cachekey.append(hex(self.filteredhash))
437 nodecount = 0
436 f.write(b" ".join(cachekey) + b'\n')
438 for label, nodes in sorted(pycompat.iteritems(self._entries)):
437 nodecount = 0
439 label = encoding.fromlocal(label)
438 for label, nodes in sorted(self._entries.items()):
440 for node in nodes:
439 label = encoding.fromlocal(label)
441 nodecount += 1
440 for node in nodes:
442 if node in self._closednodes:
441 nodecount += 1
443 state = b'c'
442 if node in self._closednodes:
444 else:
443 state = b'c'
445 state = b'o'
444 else:
446 f.write(b"%s %s %s\n" % (hex(node), state, label))
445 state = b'o'
447 f.close()
446 f.write(b"%s %s %s\n" % (hex(node), state, label))
448 repo.ui.log(
447 repo.ui.log(
449 b'branchcache',
448 b'branchcache',
450 b'wrote %s with %d labels and %d nodes\n',
449 b'wrote %s with %d labels and %d nodes\n',
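The rewrite above replaces an explicit open/f.close() pair with a with-statement over the (atomictemp) file object, so the file is reliably finalized even when an exception escapes the write loop. In plain Python terms, the two shapes compare like this (ordinary open() standing in for repo.cachevfs, purely illustrative):

f = open('branchcache.tmp', 'wb')
try:
    f.write(b'cachekey\n')
finally:
    f.close()

# equivalent, and what the new code spells:
with open('branchcache.tmp', 'wb') as f:
    f.write(b'cachekey\n')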
@@ -491,7 +490,7 b' class branchcache(object):'
491 # Faster than using ctx.obsolete()
490 # Faster than using ctx.obsolete()
492 obsrevs = obsolete.getrevs(repo, b'obsolete')
491 obsrevs = obsolete.getrevs(repo, b'obsolete')
493
492
494 for branch, newheadrevs in pycompat.iteritems(newbranches):
493 for branch, newheadrevs in newbranches.items():
495 # For every branch, compute the new branchheads.
494 # For every branch, compute the new branchheads.
496 # A branchhead is a revision such that no descendant is on
495 # A branchhead is a revision such that no descendant is on
497 # the same branch.
496 # the same branch.
@@ -632,7 +631,7 b' class remotebranchcache(branchcache):'
632 _rbccloseflag = 0x80000000
631 _rbccloseflag = 0x80000000
633
632
634
633
635 class revbranchcache(object):
634 class revbranchcache:
636 """Persistent cache, mapping from revision number to branch name and close.
635 """Persistent cache, mapping from revision number to branch name and close.
637 This is a low level cache, independent of filtering.
636 This is a low level cache, independent of filtering.
638
637
@@ -145,7 +145,6 b' future, dropping the stream may become a'
145 preserve.
145 preserve.
146 """
146 """
147
147
148 from __future__ import absolute_import, division
149
148
150 import collections
149 import collections
151 import errno
150 import errno
@@ -252,7 +251,7 b' def parthandler(parttype, params=()):'
252 return _decorator
251 return _decorator
253
252
254
253
255 class unbundlerecords(object):
254 class unbundlerecords:
256 """keep record of what happens during and unbundle
255 """keep record of what happens during and unbundle
257
256
258 New records are added using `records.add('cat', obj)`. Where 'cat' is a
257 New records are added using `records.add('cat', obj)`. Where 'cat' is a
@@ -300,7 +299,7 b' class unbundlerecords(object):'
300 __bool__ = __nonzero__
299 __bool__ = __nonzero__
301
300
302
301
303 class bundleoperation(object):
302 class bundleoperation:
304 """an object that represents a single bundling process
303 """an object that represents a single bundling process
305
304
306 Its purpose is to carry unbundle-related objects and states.
305 Its purpose is to carry unbundle-related objects and states.
@@ -380,7 +379,7 b' def applybundle(repo, unbundler, tr, sou'
380 return op
379 return op
381
380
382
381
383 class partiterator(object):
382 class partiterator:
384 def __init__(self, repo, op, unbundler):
383 def __init__(self, repo, op, unbundler):
385 self.repo = repo
384 self.repo = repo
386 self.op = op
385 self.op = op
@@ -627,7 +626,7 b' bundletypes = {'
627 bundlepriority = [b'HG10GZ', b'HG10BZ', b'HG10UN']
626 bundlepriority = [b'HG10GZ', b'HG10BZ', b'HG10UN']
628
627
629
628
630 class bundle20(object):
629 class bundle20:
631 """represent an outgoing bundle2 container
630 """represent an outgoing bundle2 container
632
631
633 Use the `addparam` method to add a stream level parameter, and `newpart` to
632 Use the `addparam` method to add a stream level parameter, and `newpart` to
@@ -751,7 +750,7 b' class bundle20(object):'
751 return salvaged
750 return salvaged
752
751
753
752
754 class unpackermixin(object):
753 class unpackermixin:
755 """A mixin to extract bytes and struct data from a stream"""
754 """A mixin to extract bytes and struct data from a stream"""
756
755
757 def __init__(self, fp):
756 def __init__(self, fp):
@@ -984,7 +983,7 b' def processcompression(unbundler, param,'
984 unbundler._compressed = True
983 unbundler._compressed = True
985
984
986
985
987 class bundlepart(object):
986 class bundlepart:
988 """A bundle2 part contains application level payload
987 """A bundle2 part contains application level payload
989
988
990 The part `type` is used to route the part to the application level
989 The part `type` is used to route the part to the application level
@@ -1274,7 +1273,7 b' class interrupthandler(unpackermixin):'
1274 )
1273 )
1275
1274
1276
1275
1277 class interruptoperation(object):
1276 class interruptoperation:
1278 """A limited operation to be use by part handler during interruption
1277 """A limited operation to be use by part handler during interruption
1279
1278
1280 It only has access to a ui object.
1279 It only has access to a ui object.
@@ -1693,7 +1692,7 b' def writenewbundle('
1693 raise error.ProgrammingError(b'unknown bundle type: %s' % bundletype)
1692 raise error.ProgrammingError(b'unknown bundle type: %s' % bundletype)
1694
1693
1695 caps = {}
1694 caps = {}
1696 if b'obsolescence' in opts:
1695 if opts.get(b'obsolescence', False):
1697 caps[b'obsmarkers'] = (b'V1',)
1696 caps[b'obsmarkers'] = (b'V1',)
1698 bundle = bundle20(ui, caps)
1697 bundle = bundle20(ui, caps)
1699 bundle.setcompression(compression, compopts)
1698 bundle.setcompression(compression, compopts)
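The b'obsolescence' in opts → opts.get(b'obsolescence', False) change above is behavioural rather than cosmetic: a membership test only asks whether the key is present, while .get() also honours an explicitly false value, so an options dict carrying obsolescence=False no longer advertises the obsmarkers capability. A standalone illustration:

opts = {b'obsolescence': False}

print(b'obsolescence' in opts)            # True  -> the old test would add the capability
print(opts.get(b'obsolescence', False))   # False -> the new test skips it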
@@ -2240,7 +2239,7 b' def handlecheckphases(op, inpart):'
2240 b'remote repository changed while pushing - please try again '
2239 b'remote repository changed while pushing - please try again '
2241 b'(%s is %s expected %s)'
2240 b'(%s is %s expected %s)'
2242 )
2241 )
2243 for expectedphase, nodes in pycompat.iteritems(phasetonodes):
2242 for expectedphase, nodes in phasetonodes.items():
2244 for n in nodes:
2243 for n in nodes:
2245 actualphase = phasecache.phase(unfi, cl.rev(n))
2244 actualphase = phasecache.phase(unfi, cl.rev(n))
2246 if actualphase != expectedphase:
2245 if actualphase != expectedphase:
@@ -3,6 +3,8 b''
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 import collections
7
6 from .i18n import _
8 from .i18n import _
7
9
8 from .thirdparty import attr
10 from .thirdparty import attr
@@ -21,13 +23,33 b" CB_MANIFEST_FILE = b'clonebundles.manife"
21
23
22
24
23 @attr.s
25 @attr.s
24 class bundlespec(object):
26 class bundlespec:
25 compression = attr.ib()
27 compression = attr.ib()
26 wirecompression = attr.ib()
28 wirecompression = attr.ib()
27 version = attr.ib()
29 version = attr.ib()
28 wireversion = attr.ib()
30 wireversion = attr.ib()
29 params = attr.ib()
31 # parameters explicitly overwritten by the config or the specification
30 contentopts = attr.ib()
32 _explicit_params = attr.ib()
33 # default parameter for the version
34 #
35 # Keeping it separated is useful to check what was actually overwritten.
36 _default_opts = attr.ib()
37
38 @property
39 def params(self):
40 return collections.ChainMap(self._explicit_params, self._default_opts)
41
42 @property
43 def contentopts(self):
44 # kept for Backward Compatibility concerns.
45 return self.params
46
47 def set_param(self, key, value, overwrite=True):
48 """Set a bundle parameter value.
49
50 Will only overwrite if overwrite is true"""
51 if overwrite or key not in self._explicit_params:
52 self._explicit_params[key] = value
31
53
32
54
33 # Maps bundle version human names to changegroup versions.
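The bundlespec refactoring above splits parameters into an explicit layer and a per-version default layer and exposes them through collections.ChainMap, which searches its maps left to right on lookup, so explicit values shadow the defaults while both remain inspectable. A small standalone sketch of that layering (made-up parameter values, only to show the mechanics):

import collections

defaults = {b'cg.version': b'02', b'obsolescence': False}
explicit = {}
params = collections.ChainMap(explicit, defaults)

def set_param(key, value, overwrite=True):
    # mirrors bundlespec.set_param: only the explicit layer is ever written
    if overwrite or key not in explicit:
        explicit[key] = value

print(params[b'obsolescence'])            # False, found in the defaults layer
set_param(b'obsolescence', True)
print(params[b'obsolescence'])            # True, the explicit layer now shadows it
set_param(b'obsolescence', False, overwrite=False)
print(params[b'obsolescence'])            # still True: overwrite=False keeps the earlier value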
55 # Maps bundle version human names to changegroup versions.
@@ -56,23 +78,78 b' class bundlespec(object):'
56 b'tagsfnodescache': True,
78 b'tagsfnodescache': True,
57 b'revbranchcache': True,
79 b'revbranchcache': True,
58 },
80 },
59 b'packed1': {b'cg.version': b's1'},
81 b'streamv2': {
82 b'changegroup': False,
83 b'cg.version': b'02',
84 b'obsolescence': False,
85 b'phases': False,
86 b"streamv2": True,
87 b'tagsfnodescache': False,
88 b'revbranchcache': False,
89 },
90 b'packed1': {
91 b'cg.version': b's1',
92 },
93 b'bundle2': { # legacy
94 b'cg.version': b'02',
95 },
60 }
96 }
61 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
97 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
62
98
63 _bundlespecvariants = {
99 _bundlespecvariants = {b"streamv2": {}}
64 b"streamv2": {
65 b"changegroup": False,
66 b"streamv2": True,
67 b"tagsfnodescache": False,
68 b"revbranchcache": False,
69 }
70 }
71
100
72 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
101 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
73 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
102 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
74
103
75
104
105 def param_bool(key, value):
106 """make a boolean out of a parameter value"""
107 b = stringutil.parsebool(value)
108 if b is None:
109 msg = _(b"parameter %s should be a boolean ('%s')")
110 msg %= (key, value)
111 raise error.InvalidBundleSpecification(msg)
112 return b
113
114
115 # mapping of known parameter names that need their value processed
116 bundle_spec_param_processing = {
117 b"obsolescence": param_bool,
118 b"obsolescence-mandatory": param_bool,
119 b"phases": param_bool,
120 }
121
122
123 def _parseparams(s):
124 """parse bundlespec parameter section
125
126 input: "comp-version;params" string
127
128 return: (spec; {param_key: param_value})
129 """
130 if b';' not in s:
131 return s, {}
132
133 params = {}
134 version, paramstr = s.split(b';', 1)
135
136 err = _(b'invalid bundle specification: missing "=" in parameter: %s')
137 for p in paramstr.split(b';'):
138 if b'=' not in p:
139 msg = err % p
140 raise error.InvalidBundleSpecification(msg)
141
142 key, value = p.split(b'=', 1)
143 key = urlreq.unquote(key)
144 value = urlreq.unquote(value)
145 process = bundle_spec_param_processing.get(key)
146 if process is not None:
147 value = process(key, value)
148 params[key] = value
149
150 return version, params
151
152
76 def parsebundlespec(repo, spec, strict=True):
153 def parsebundlespec(repo, spec, strict=True):
77 """Parse a bundle string specification into parts.
154 """Parse a bundle string specification into parts.
78
155
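_parseparams above, now hoisted to module level, splits an optional ";key=value;key=value" tail off the version string, URL-unquotes both sides, and runs per-key processors such as param_bool. A standalone restatement of just the splitting part, to show the expected shapes (this re-implements the logic shown rather than importing Mercurial):

from urllib.parse import unquote_to_bytes

def parse_params(s):
    if b';' not in s:
        return s, {}
    version, paramstr = s.split(b';', 1)
    params = {}
    for p in paramstr.split(b';'):
        if b'=' not in p:
            raise ValueError('missing "=" in parameter: %r' % p)
        key, value = p.split(b'=', 1)
        params[unquote_to_bytes(key)] = unquote_to_bytes(value)
    return version, params

print(parse_params(b'v2'))
# (b'v2', {})
print(parse_params(b'v2;obsolescence=true;phases=true'))
# (b'v2', {b'obsolescence': b'true', b'phases': b'true'})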
@@ -106,31 +183,6 b' def parsebundlespec(repo, spec, strict=T'
106 Note: this function will likely eventually return a more complex data
183 Note: this function will likely eventually return a more complex data
107 structure, including bundle2 part information.
184 structure, including bundle2 part information.
108 """
185 """
109
110 def parseparams(s):
111 if b';' not in s:
112 return s, {}
113
114 params = {}
115 version, paramstr = s.split(b';', 1)
116
117 for p in paramstr.split(b';'):
118 if b'=' not in p:
119 raise error.InvalidBundleSpecification(
120 _(
121 b'invalid bundle specification: '
122 b'missing "=" in parameter: %s'
123 )
124 % p
125 )
126
127 key, value = p.split(b'=', 1)
128 key = urlreq.unquote(key)
129 value = urlreq.unquote(value)
130 params[key] = value
131
132 return version, params
133
134 if strict and b'-' not in spec:
186 if strict and b'-' not in spec:
135 raise error.InvalidBundleSpecification(
187 raise error.InvalidBundleSpecification(
136 _(
188 _(
@@ -140,7 +192,8 b' def parsebundlespec(repo, spec, strict=T'
140 % spec
192 % spec
141 )
193 )
142
194
143 if b'-' in spec:
195 pre_args = spec.split(b';', 1)[0]
196 if b'-' in pre_args:
144 compression, version = spec.split(b'-', 1)
197 compression, version = spec.split(b'-', 1)
145
198
146 if compression not in util.compengines.supportedbundlenames:
199 if compression not in util.compengines.supportedbundlenames:
@@ -148,9 +201,9 b' def parsebundlespec(repo, spec, strict=T'
148 _(b'%s compression is not supported') % compression
201 _(b'%s compression is not supported') % compression
149 )
202 )
150
203
151 version, params = parseparams(version)
204 version, params = _parseparams(version)
152
205
153 if version not in _bundlespeccgversions:
206 if version not in _bundlespeccontentopts:
154 raise error.UnsupportedBundleSpecification(
207 raise error.UnsupportedBundleSpecification(
155 _(b'%s is not a recognized bundle version') % version
208 _(b'%s is not a recognized bundle version') % version
156 )
209 )
@@ -159,7 +212,7 b' def parsebundlespec(repo, spec, strict=T'
159 # case some defaults are assumed (but only when not in strict mode).
212 # case some defaults are assumed (but only when not in strict mode).
160 assert not strict
213 assert not strict
161
214
162 spec, params = parseparams(spec)
215 spec, params = _parseparams(spec)
163
216
164 if spec in util.compengines.supportedbundlenames:
217 if spec in util.compengines.supportedbundlenames:
165 compression = spec
218 compression = spec
@@ -172,7 +225,7 b' def parsebundlespec(repo, spec, strict=T'
172 # Modern compression engines require v2.
225 # Modern compression engines require v2.
173 if compression not in _bundlespecv1compengines:
226 if compression not in _bundlespecv1compengines:
174 version = b'v2'
227 version = b'v2'
175 elif spec in _bundlespeccgversions:
228 elif spec in _bundlespeccontentopts:
176 if spec == b'packed1':
229 if spec == b'packed1':
177 compression = b'none'
230 compression = b'none'
178 else:
231 else:
@@ -203,16 +256,25 b' def parsebundlespec(repo, spec, strict=T'
203 )
256 )
204
257
205 # Compute contentopts based on the version
258 # Compute contentopts based on the version
259 if b"stream" in params and params[b"stream"] == b"v2":
260 # That case is fishy as this mostly derails the version selection
261 # mechanism. `stream` bundles are quite specific and used differently
262 # as "normal" bundles.
263 #
264 # So we are pinning this to "v2", as this will likely be
265 # compatible forever. (see the next conditional).
266 #
267 # (we should probably define a cleaner way to do this and raise a
268 # warning when the old way is encountered)
269 version = b"streamv2"
206 contentopts = _bundlespeccontentopts.get(version, {}).copy()
270 contentopts = _bundlespeccontentopts.get(version, {}).copy()
207
271 if version == b"streamv2":
208 # Process the variants
272 # streamv2 have been reported as "v2" for a while.
209 if b"stream" in params and params[b"stream"] == b"v2":
273 version = b"v2"
210 variant = _bundlespecvariants[b"streamv2"]
211 contentopts.update(variant)
212
274
213 engine = util.compengines.forbundlename(compression)
275 engine = util.compengines.forbundlename(compression)
214 compression, wirecompression = engine.bundletype()
276 compression, wirecompression = engine.bundletype()
215 wireversion = _bundlespeccgversions[version]
277 wireversion = _bundlespeccontentopts[version][b'cg.version']
216
278
217 return bundlespec(
279 return bundlespec(
218 compression, wirecompression, version, wireversion, params, contentopts
280 compression, wirecompression, version, wireversion, params, contentopts
@@ -343,7 +405,7 b' def filterclonebundleentries(repo, entri'
343 return newentries
405 return newentries
344
406
345
407
346 class clonebundleentry(object):
408 class clonebundleentry:
347 """Represents an item in a clone bundles manifest.
409 """Represents an item in a clone bundles manifest.
348
410
349 This rich class is needed to support sorting since sorted() in Python 3
411 This rich class is needed to support sorting since sorted() in Python 3
@@ -11,7 +11,6 b' This provides a read-only repository int'
11 were part of the actual repository.
11 were part of the actual repository.
12 """
12 """
13
13
14 from __future__ import absolute_import
15
14
16 import os
15 import os
17 import shutil
16 import shutil
@@ -271,7 +270,7 b' def _getfilestarts(cgunpacker):'
271 return filespos
270 return filespos
272
271
273
272
274 class bundlerepository(object):
273 class bundlerepository:
275 """A repository instance that is a union of a local repo and a bundle.
274 """A repository instance that is a union of a local repo and a bundle.
276
275
277 Instances represent a read-only repository composed of a local repository
276 Instances represent a read-only repository composed of a local repository
@@ -551,7 +550,7 b' def makebundlerepository(ui, repopath, b'
551 return repo
550 return repo
552
551
553
552
554 class bundletransactionmanager(object):
553 class bundletransactionmanager:
555 def transaction(self):
554 def transaction(self):
556 return None
555 return None
557
556
@@ -4,7 +4,6 b''
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
8
7
9 from . import repoview
8 from . import repoview
10
9
@@ -38,7 +38,7 b' static PyObject *b85encode(PyObject *sel'
38 unsigned int acc, val, ch;
38 unsigned int acc, val, ch;
39 int pad = 0;
39 int pad = 0;
40
40
41 if (!PyArg_ParseTuple(args, PY23("s#|i", "y#|i"), &text, &len, &pad)) {
41 if (!PyArg_ParseTuple(args, "y#|i", &text, &len, &pad)) {
42 return NULL;
42 return NULL;
43 }
43 }
44
44
@@ -90,7 +90,7 b' static PyObject *b85decode(PyObject *sel'
90 int c;
90 int c;
91 unsigned int acc;
91 unsigned int acc;
92
92
93 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &len)) {
93 if (!PyArg_ParseTuple(args, "y#", &text, &len)) {
94 return NULL;
94 return NULL;
95 }
95 }
96
96
@@ -177,7 +177,6 b' static PyMethodDef methods[] = {'
177
177
178 static const int version = 1;
178 static const int version = 1;
179
179
180 #ifdef IS_PY3K
181 static struct PyModuleDef base85_module = {
180 static struct PyModuleDef base85_module = {
182 PyModuleDef_HEAD_INIT, "base85", base85_doc, -1, methods,
181 PyModuleDef_HEAD_INIT, "base85", base85_doc, -1, methods,
183 };
182 };
@@ -191,13 +190,3 b' PyMODINIT_FUNC PyInit_base85(void)'
191 PyModule_AddIntConstant(m, "version", version);
190 PyModule_AddIntConstant(m, "version", version);
192 return m;
191 return m;
193 }
192 }
194 #else
195 PyMODINIT_FUNC initbase85(void)
196 {
197 PyObject *m;
198 m = Py_InitModule3("base85", methods, base85_doc);
199
200 b85prep();
201 PyModule_AddIntConstant(m, "version", version);
202 }
203 #endif
@@ -76,8 +76,7 b' static PyObject *bdiff(PyObject *self, P'
76
76
77 l.next = NULL;
77 l.next = NULL;
78
78
79 if (!PyArg_ParseTuple(args, PY23("s*s*:bdiff", "y*y*:bdiff"), &ba,
79 if (!PyArg_ParseTuple(args, "y*y*:bdiff", &ba, &bb)) {
80 &bb)) {
81 return NULL;
80 return NULL;
82 }
81 }
83
82
@@ -233,7 +232,7 b' static PyObject *splitnewlines(PyObject '
233 Py_ssize_t nelts = 0, size, i, start = 0;
232 Py_ssize_t nelts = 0, size, i, start = 0;
234 PyObject *result = NULL;
233 PyObject *result = NULL;
235
234
236 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &text, &size)) {
235 if (!PyArg_ParseTuple(args, "y#", &text, &size)) {
237 goto abort;
236 goto abort;
238 }
237 }
239 if (!size) {
238 if (!size) {
@@ -299,8 +298,7 b' static PyObject *xdiffblocks(PyObject *s'
299 NULL, /* priv */
298 NULL, /* priv */
300 };
299 };
301
300
302 if (!PyArg_ParseTuple(args, PY23("s#s#", "y#y#"), &a.ptr, &la, &b.ptr,
301 if (!PyArg_ParseTuple(args, "y#y#", &a.ptr, &la, &b.ptr, &lb)) {
303 &lb)) {
304 return NULL;
302 return NULL;
305 }
303 }
306
304
@@ -337,7 +335,6 b' static PyMethodDef methods[] = {'
337
335
338 static const int version = 3;
336 static const int version = 3;
339
337
340 #ifdef IS_PY3K
341 static struct PyModuleDef bdiff_module = {
338 static struct PyModuleDef bdiff_module = {
342 PyModuleDef_HEAD_INIT, "bdiff", mdiff_doc, -1, methods,
339 PyModuleDef_HEAD_INIT, "bdiff", mdiff_doc, -1, methods,
343 };
340 };
@@ -349,11 +346,3 b' PyMODINIT_FUNC PyInit_bdiff(void)'
349 PyModule_AddIntConstant(m, "version", version);
346 PyModule_AddIntConstant(m, "version", version);
350 return m;
347 return m;
351 }
348 }
352 #else
353 PyMODINIT_FUNC initbdiff(void)
354 {
355 PyObject *m;
356 m = Py_InitModule3("bdiff", methods, mdiff_doc);
357 PyModule_AddIntConstant(m, "version", version);
358 }
359 #endif
@@ -15,14 +15,6 b''
15 #include "compat.h"
15 #include "compat.h"
16 #include "util.h"
16 #include "util.h"
17
17
18 #ifdef IS_PY3K
19 /* The mapping of Python types is meant to be temporary to get Python
20 * 3 to compile. We should remove this once Python 3 support is fully
21 * supported and proper types are used in the extensions themselves. */
22 #define PyInt_Type PyLong_Type
23 #define PyInt_AS_LONG PyLong_AS_LONG
24 #endif
25
26 /* clang-format off */
18 /* clang-format off */
27 static const char lowertable[128] = {
19 static const char lowertable[128] = {
28 '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
20 '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07',
@@ -133,8 +125,7 b' PyObject *isasciistr(PyObject *self, PyO'
133 {
125 {
134 const char *buf;
126 const char *buf;
135 Py_ssize_t i, len;
127 Py_ssize_t i, len;
136 if (!PyArg_ParseTuple(args, PY23("s#:isasciistr", "y#:isasciistr"),
128 if (!PyArg_ParseTuple(args, "y#:isasciistr", &buf, &len)) {
137 &buf, &len)) {
138 return NULL;
129 return NULL;
139 }
130 }
140 i = 0;
131 i = 0;
@@ -228,12 +219,12 b' PyObject *make_file_foldmap(PyObject *se'
228 const char *table;
219 const char *table;
229
220
230 if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap", &PyDict_Type,
221 if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap", &PyDict_Type,
231 &dmap, &PyInt_Type, &spec_obj, &PyFunction_Type,
222 &dmap, &PyLong_Type, &spec_obj, &PyFunction_Type,
232 &normcase_fallback)) {
223 &normcase_fallback)) {
233 goto quit;
224 goto quit;
234 }
225 }
235
226
236 spec = (int)PyInt_AS_LONG(spec_obj);
227 spec = (int)PyLong_AS_LONG(spec_obj);
237 switch (spec) {
228 switch (spec) {
238 case NORMCASE_LOWER:
229 case NORMCASE_LOWER:
239 table = lowertable;
230 table = lowertable;
@@ -13,11 +13,7 b''
13
13
14 #include "util.h"
14 #include "util.h"
15
15
16 #ifdef IS_PY3K
17 #define PYLONG_VALUE(o) ((PyLongObject *)o)->ob_digit[0]
16 #define PYLONG_VALUE(o) ((PyLongObject *)o)->ob_digit[0]
18 #else
19 #define PYLONG_VALUE(o) PyInt_AS_LONG(o)
20 #endif
21
17
22 /*
18 /*
23 * This is a multiset of directory names, built from the files that
19 * This is a multiset of directory names, built from the files that
@@ -100,11 +96,7 b' static int _addpath(PyObject *dirs, PyOb'
100 }
96 }
101
97
102 /* Force Python to not reuse a small shared int. */
98 /* Force Python to not reuse a small shared int. */
103 #ifdef IS_PY3K
104 val = PyLong_FromLong(0x1eadbeef);
99 val = PyLong_FromLong(0x1eadbeef);
105 #else
106 val = PyInt_FromLong(0x1eadbeef);
107 #endif
108
100
109 if (val == NULL)
101 if (val == NULL)
110 goto bail;
102 goto bail;
@@ -317,12 +317,7 b' done:'
317 return ret;
317 return ret;
318 }
318 }
319
319
320 #ifdef IS_PY3K
321 #define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
320 #define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
322 #else
323 #define LAZYMANIFESTENTRIESITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
324 | Py_TPFLAGS_HAVE_ITER
325 #endif
326
321
327 static PyTypeObject lazymanifestEntriesIterator = {
322 static PyTypeObject lazymanifestEntriesIterator = {
328 PyVarObject_HEAD_INIT(NULL, 0) /* header */
323 PyVarObject_HEAD_INIT(NULL, 0) /* header */
@@ -365,12 +360,7 b' static PyObject *lmiter_iterkeysnext(PyO'
365 return PyBytes_FromStringAndSize(l->start, pl);
360 return PyBytes_FromStringAndSize(l->start, pl);
366 }
361 }
367
362
368 #ifdef IS_PY3K
369 #define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
363 #define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT
370 #else
371 #define LAZYMANIFESTKEYSITERATOR_TPFLAGS Py_TPFLAGS_DEFAULT \
372 | Py_TPFLAGS_HAVE_ITER
373 #endif
374
364
375 static PyTypeObject lazymanifestKeysIterator = {
365 static PyTypeObject lazymanifestKeysIterator = {
376 PyVarObject_HEAD_INIT(NULL, 0) /* header */
366 PyVarObject_HEAD_INIT(NULL, 0) /* header */
@@ -790,7 +780,7 b' static lazymanifest *lazymanifest_filter'
790 Py_INCREF(copy->pydata);
780 Py_INCREF(copy->pydata);
791 for (i = 0; i < self->numlines; i++) {
781 for (i = 0; i < self->numlines; i++) {
792 PyObject *arglist = NULL, *result = NULL;
782 PyObject *arglist = NULL, *result = NULL;
793 arglist = Py_BuildValue(PY23("(s)", "(y)"),
783 arglist = Py_BuildValue("(y)",
794 self->lines[i].start);
784 self->lines[i].start);
795 if (!arglist) {
785 if (!arglist) {
796 goto bail;
786 goto bail;
@@ -955,11 +945,7 b' static PyMethodDef lazymanifest_methods['
955 {NULL},
945 {NULL},
956 };
946 };
957
947
958 #ifdef IS_PY3K
959 #define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT
948 #define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT
960 #else
961 #define LAZYMANIFEST_TPFLAGS Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_SEQUENCE_IN
962 #endif
963
949
964 static PyTypeObject lazymanifestType = {
950 static PyTypeObject lazymanifestType = {
965 PyVarObject_HEAD_INIT(NULL, 0) /* header */
951 PyVarObject_HEAD_INIT(NULL, 0) /* header */
@@ -144,8 +144,7 b' static PyObject *patchedsize(PyObject *s'
144 Py_ssize_t patchlen;
144 Py_ssize_t patchlen;
145 char *bin;
145 char *bin;
146
146
147 if (!PyArg_ParseTuple(args, PY23("ls#", "ly#"), &orig, &bin,
147 if (!PyArg_ParseTuple(args, "ly#", &orig, &bin, &patchlen)) {
148 &patchlen)) {
149 return NULL;
148 return NULL;
150 }
149 }
151
150
@@ -182,7 +181,6 b' static PyMethodDef methods[] = {'
182
181
183 static const int version = 1;
182 static const int version = 1;
184
183
185 #ifdef IS_PY3K
186 static struct PyModuleDef mpatch_module = {
184 static struct PyModuleDef mpatch_module = {
187 PyModuleDef_HEAD_INIT, "mpatch", mpatch_doc, -1, methods,
185 PyModuleDef_HEAD_INIT, "mpatch", mpatch_doc, -1, methods,
188 };
186 };
@@ -203,13 +201,3 b' PyMODINIT_FUNC PyInit_mpatch(void)'
203
201
204 return m;
202 return m;
205 }
203 }
206 #else
207 PyMODINIT_FUNC initmpatch(void)
208 {
209 PyObject *m;
210 m = Py_InitModule3("mpatch", methods, mpatch_doc);
211 mpatch_Error =
212 PyErr_NewException("mercurial.cext.mpatch.mpatchError", NULL, NULL);
213 PyModule_AddIntConstant(m, "version", version);
214 }
215 #endif
@@ -73,19 +73,11 b' struct listdir_stat {'
73 };
73 };
74 #endif
74 #endif
75
75
76 #ifdef IS_PY3K
77 #define listdir_slot(name) \
76 #define listdir_slot(name) \
78 static PyObject *listdir_stat_##name(PyObject *self, void *x) \
77 static PyObject *listdir_stat_##name(PyObject *self, void *x) \
79 { \
78 { \
80 return PyLong_FromLong(((struct listdir_stat *)self)->st.name); \
79 return PyLong_FromLong(((struct listdir_stat *)self)->st.name); \
81 }
80 }
82 #else
83 #define listdir_slot(name) \
84 static PyObject *listdir_stat_##name(PyObject *self, void *x) \
85 { \
86 return PyInt_FromLong(((struct listdir_stat *)self)->st.name); \
87 }
88 #endif
89
81
90 listdir_slot(st_dev)
82 listdir_slot(st_dev)
91 listdir_slot(st_mode)
83 listdir_slot(st_mode)
@@ -206,7 +198,7 b' static PyObject *make_item(const WIN32_F'
206 ? _S_IFDIR : _S_IFREG;
198 ? _S_IFDIR : _S_IFREG;
207
199
208 if (!wantstat)
200 if (!wantstat)
209 return Py_BuildValue(PY23("si", "yi"), fd->cFileName, kind);
201 return Py_BuildValue("yi", fd->cFileName, kind);
210
202
211 py_st = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
203 py_st = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL);
212 if (!py_st)
204 if (!py_st)
@@ -224,7 +216,7 b' static PyObject *make_item(const WIN32_F'
224 if (kind == _S_IFREG)
216 if (kind == _S_IFREG)
225 stp->st_size = ((__int64)fd->nFileSizeHigh << 32)
217 stp->st_size = ((__int64)fd->nFileSizeHigh << 32)
226 + fd->nFileSizeLow;
218 + fd->nFileSizeLow;
227 return Py_BuildValue(PY23("siN", "yiN"), fd->cFileName,
219 return Py_BuildValue("yiN", fd->cFileName,
228 kind, py_st);
220 kind, py_st);
229 }
221 }
230
222
@@ -412,10 +404,10 b' static PyObject *_listdir_stat(char *pat'
412 PyObject *stat = makestat(&st);
404 PyObject *stat = makestat(&st);
413 if (!stat)
405 if (!stat)
414 goto error;
406 goto error;
415 elem = Py_BuildValue(PY23("siN", "yiN"), ent->d_name,
407 elem = Py_BuildValue("yiN", ent->d_name,
416 kind, stat);
408 kind, stat);
417 } else
409 } else
418 elem = Py_BuildValue(PY23("si", "yi"), ent->d_name,
410 elem = Py_BuildValue("yi", ent->d_name,
419 kind);
411 kind);
420 if (!elem)
412 if (!elem)
421 goto error;
413 goto error;
@@ -593,10 +585,10 b' static PyObject *_listdir_batch(char *pa'
593 stat = makestat(&st);
585 stat = makestat(&st);
594 if (!stat)
586 if (!stat)
595 goto error;
587 goto error;
596 elem = Py_BuildValue(PY23("siN", "yiN"),
588 elem = Py_BuildValue("yiN",
597 filename, kind, stat);
589 filename, kind, stat);
598 } else
590 } else
599 elem = Py_BuildValue(PY23("si", "yi"),
591 elem = Py_BuildValue("yi",
600 filename, kind);
592 filename, kind);
601 if (!elem)
593 if (!elem)
602 goto error;
594 goto error;
@@ -693,84 +685,11 b' bail:'
693 return NULL;
685 return NULL;
694 }
686 }
695
687
696 /*
697 * recvfds() simply does not release GIL during blocking io operation because
698 * command server is known to be single-threaded.
699 *
700 * Old systems such as Solaris don't provide CMSG_LEN, msg_control, etc.
701 * Currently, recvfds() is not supported on these platforms.
702 */
703 #ifdef CMSG_LEN
704
705 static ssize_t recvfdstobuf(int sockfd, int **rfds, void *cbuf, size_t cbufsize)
706 {
707 char dummy[1];
708 struct iovec iov = {dummy, sizeof(dummy)};
709 struct msghdr msgh = {0};
710 struct cmsghdr *cmsg;
711
712 msgh.msg_iov = &iov;
713 msgh.msg_iovlen = 1;
714 msgh.msg_control = cbuf;
715 msgh.msg_controllen = (socklen_t)cbufsize;
716 if (recvmsg(sockfd, &msgh, 0) < 0)
717 return -1;
718
719 for (cmsg = CMSG_FIRSTHDR(&msgh); cmsg;
720 cmsg = CMSG_NXTHDR(&msgh, cmsg)) {
721 if (cmsg->cmsg_level != SOL_SOCKET ||
722 cmsg->cmsg_type != SCM_RIGHTS)
723 continue;
724 *rfds = (int *)CMSG_DATA(cmsg);
725 return (cmsg->cmsg_len - CMSG_LEN(0)) / sizeof(int);
726 }
727
728 *rfds = cbuf;
729 return 0;
730 }
731
732 static PyObject *recvfds(PyObject *self, PyObject *args)
733 {
734 int sockfd;
735 int *rfds = NULL;
736 ssize_t rfdscount, i;
737 char cbuf[256];
738 PyObject *rfdslist = NULL;
739
740 if (!PyArg_ParseTuple(args, "i", &sockfd))
741 return NULL;
742
743 rfdscount = recvfdstobuf(sockfd, &rfds, cbuf, sizeof(cbuf));
744 if (rfdscount < 0)
745 return PyErr_SetFromErrno(PyExc_OSError);
746
747 rfdslist = PyList_New(rfdscount);
748 if (!rfdslist)
749 goto bail;
750 for (i = 0; i < rfdscount; i++) {
751 PyObject *obj = PyLong_FromLong(rfds[i]);
752 if (!obj)
753 goto bail;
754 PyList_SET_ITEM(rfdslist, i, obj);
755 }
756 return rfdslist;
757
758 bail:
759 Py_XDECREF(rfdslist);
760 return NULL;
761 }
762
763 #endif /* CMSG_LEN */
764
765 /* allow disabling setprocname via compiler flags */
688 /* allow disabling setprocname via compiler flags */
766 #ifndef SETPROCNAME_USE_NONE
689 #ifndef SETPROCNAME_USE_NONE
767 #if defined(HAVE_SETPROCTITLE)
690 #if defined(HAVE_SETPROCTITLE)
768 /* setproctitle is the first choice - available in FreeBSD */
691 /* setproctitle is the first choice - available in FreeBSD */
769 #define SETPROCNAME_USE_SETPROCTITLE
692 #define SETPROCNAME_USE_SETPROCTITLE
770 #elif (defined(__linux__) || defined(__APPLE__)) && PY_MAJOR_VERSION == 2
771 /* rewrite the argv buffer in place - works in Linux and OS X. Py_GetArgcArgv
772 * in Python 3 returns the copied wchar_t **argv, thus unsupported. */
773 #define SETPROCNAME_USE_ARGVREWRITE
774 #else
693 #else
775 #define SETPROCNAME_USE_NONE
694 #define SETPROCNAME_USE_NONE
776 #endif
695 #endif
@@ -780,49 +699,11 b' bail:'
780 static PyObject *setprocname(PyObject *self, PyObject *args)
699 static PyObject *setprocname(PyObject *self, PyObject *args)
781 {
700 {
782 const char *name = NULL;
701 const char *name = NULL;
783 if (!PyArg_ParseTuple(args, PY23("s", "y"), &name))
702 if (!PyArg_ParseTuple(args, "y", &name))
784 return NULL;
703 return NULL;
785
704
786 #if defined(SETPROCNAME_USE_SETPROCTITLE)
705 #if defined(SETPROCNAME_USE_SETPROCTITLE)
787 setproctitle("%s", name);
706 setproctitle("%s", name);
788 #elif defined(SETPROCNAME_USE_ARGVREWRITE)
789 {
790 static char *argvstart = NULL;
791 static size_t argvsize = 0;
792 if (argvstart == NULL) {
793 int argc = 0, i;
794 char **argv = NULL;
795 char *argvend;
796 extern void Py_GetArgcArgv(int *argc, char ***argv);
797 Py_GetArgcArgv(&argc, &argv);
798 /* Py_GetArgcArgv may not do much if a custom python
799 * launcher is used that doesn't record the information
800 * it needs. Let's handle this gracefully instead of
801 * segfaulting. */
802 if (argv != NULL)
803 argvend = argvstart = argv[0];
804 else
805 argvend = argvstart = NULL;
806
807 /* Check the memory we can use. Typically, argv[i] and
808 * argv[i + 1] are continuous. */
809 for (i = 0; i < argc; ++i) {
810 size_t len;
811 if (argv[i] > argvend || argv[i] < argvstart)
812 break; /* not continuous */
813 len = strlen(argv[i]);
814 argvend = argv[i] + len + 1 /* '\0' */;
815 }
816 if (argvend > argvstart) /* sanity check */
817 argvsize = argvend - argvstart;
818 }
819
820 if (argvstart && argvsize > 1) {
821 int n = snprintf(argvstart, argvsize, "%s", name);
822 if (n >= 0 && (size_t)n < argvsize)
823 memset(argvstart + n, 0, argvsize - n);
824 }
825 }
826 #endif
707 #endif
827
708
828 Py_RETURN_NONE;
709 Py_RETURN_NONE;
@@ -1135,14 +1016,14 b' static PyObject *getfstype(PyObject *sel'
1135 const char *path = NULL;
1016 const char *path = NULL;
1136 struct statfs buf;
1017 struct statfs buf;
1137 int r;
1018 int r;
1138 if (!PyArg_ParseTuple(args, PY23("s", "y"), &path))
1019 if (!PyArg_ParseTuple(args, "y", &path))
1139 return NULL;
1020 return NULL;
1140
1021
1141 memset(&buf, 0, sizeof(buf));
1022 memset(&buf, 0, sizeof(buf));
1142 r = statfs(path, &buf);
1023 r = statfs(path, &buf);
1143 if (r != 0)
1024 if (r != 0)
1144 return PyErr_SetFromErrno(PyExc_OSError);
1025 return PyErr_SetFromErrno(PyExc_OSError);
1145 return Py_BuildValue(PY23("s", "y"), describefstype(&buf));
1026 return Py_BuildValue("y", describefstype(&buf));
1146 }
1027 }
1147 #endif /* defined(HAVE_LINUX_STATFS) || defined(HAVE_BSD_STATFS) */
1028 #endif /* defined(HAVE_LINUX_STATFS) || defined(HAVE_BSD_STATFS) */
1148
1029
@@ -1153,14 +1034,14 b' static PyObject *getfsmountpoint(PyObjec'
1153 const char *path = NULL;
1034 const char *path = NULL;
1154 struct statfs buf;
1035 struct statfs buf;
1155 int r;
1036 int r;
1156 if (!PyArg_ParseTuple(args, PY23("s", "y"), &path))
1037 if (!PyArg_ParseTuple(args, "y", &path))
1157 return NULL;
1038 return NULL;
1158
1039
1159 memset(&buf, 0, sizeof(buf));
1040 memset(&buf, 0, sizeof(buf));
1160 r = statfs(path, &buf);
1041 r = statfs(path, &buf);
1161 if (r != 0)
1042 if (r != 0)
1162 return PyErr_SetFromErrno(PyExc_OSError);
1043 return PyErr_SetFromErrno(PyExc_OSError);
1163 return Py_BuildValue(PY23("s", "y"), buf.f_mntonname);
1044 return Py_BuildValue("y", buf.f_mntonname);
1164 }
1045 }
1165 #endif /* defined(HAVE_BSD_STATFS) */
1046 #endif /* defined(HAVE_BSD_STATFS) */
1166
1047
@@ -1195,8 +1076,7 b' static PyObject *listdir(PyObject *self,'
1195
1076
1196 static char *kwlist[] = {"path", "stat", "skip", NULL};
1077 static char *kwlist[] = {"path", "stat", "skip", NULL};
1197
1078
1198 if (!PyArg_ParseTupleAndKeywords(args, kwargs, PY23("s#|OO:listdir",
1079 if (!PyArg_ParseTupleAndKeywords(args, kwargs, "y#|OO:listdir",
1199 "y#|OO:listdir"),
1200 kwlist, &path, &plen, &statobj, &skipobj))
1080 kwlist, &path, &plen, &statobj, &skipobj))
1201 return NULL;
1081 return NULL;
1202
1082
@@ -1227,12 +1107,8 b' static PyObject *posixfile(PyObject *sel'
1227 char fpmode[4];
1107 char fpmode[4];
1228 int fppos = 0;
1108 int fppos = 0;
1229 int plus;
1109 int plus;
1230 #ifndef IS_PY3K
1231 FILE *fp;
1232 #endif
1233
1110
1234 if (!PyArg_ParseTupleAndKeywords(args, kwds, PY23("et|si:posixfile",
1111 if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|yi:posixfile",
1235 "et|yi:posixfile"),
1236 kwlist,
1112 kwlist,
1237 Py_FileSystemDefaultEncoding,
1113 Py_FileSystemDefaultEncoding,
1238 &name, &mode, &bufsize))
1114 &name, &mode, &bufsize))
@@ -1302,26 +1178,9 b' static PyObject *posixfile(PyObject *sel'
1302 PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
1178 PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
1303 goto bail;
1179 goto bail;
1304 }
1180 }
1305 #ifndef IS_PY3K
1306 fp = _fdopen(fd, fpmode);
1307 if (fp == NULL) {
1308 _close(fd);
1309 PyErr_SetFromErrnoWithFilename(PyExc_IOError, name);
1310 goto bail;
1311 }
1312
1313 file_obj = PyFile_FromFile(fp, name, mode, fclose);
1314 if (file_obj == NULL) {
1315 fclose(fp);
1316 goto bail;
1317 }
1318
1319 PyFile_SetBufSize(file_obj, bufsize);
1320 #else
1321 file_obj = PyFile_FromFd(fd, name, mode, bufsize, NULL, NULL, NULL, 1);
1181 file_obj = PyFile_FromFd(fd, name, mode, bufsize, NULL, NULL, NULL, 1);
1322 if (file_obj == NULL)
1182 if (file_obj == NULL)
1323 goto bail;
1183 goto bail;
1324 #endif
1325 bail:
1184 bail:
1326 PyMem_Free(name);
1185 PyMem_Free(name);
1327 return file_obj;
1186 return file_obj;
@@ -1357,10 +1216,6 b' static PyMethodDef methods[] = {'
1357 {"statfiles", (PyCFunction)statfiles, METH_VARARGS | METH_KEYWORDS,
1216 {"statfiles", (PyCFunction)statfiles, METH_VARARGS | METH_KEYWORDS,
1358 "stat a series of files or symlinks\n"
1217 "stat a series of files or symlinks\n"
1359 "Returns None for non-existent entries and entries of other types.\n"},
1218 "Returns None for non-existent entries and entries of other types.\n"},
1360 #ifdef CMSG_LEN
1361 {"recvfds", (PyCFunction)recvfds, METH_VARARGS,
1362 "receive list of file descriptors via socket\n"},
1363 #endif
1364 #ifndef SETPROCNAME_USE_NONE
1219 #ifndef SETPROCNAME_USE_NONE
1365 {"setprocname", (PyCFunction)setprocname, METH_VARARGS,
1220 {"setprocname", (PyCFunction)setprocname, METH_VARARGS,
1366 "set process title (best-effort)\n"},
1221 "set process title (best-effort)\n"},
@@ -1387,7 +1242,6 b' static PyMethodDef methods[] = {'
1387
1242
1388 static const int version = 4;
1243 static const int version = 4;
1389
1244
1390 #ifdef IS_PY3K
1391 static struct PyModuleDef osutil_module = {
1245 static struct PyModuleDef osutil_module = {
1392 PyModuleDef_HEAD_INIT,
1246 PyModuleDef_HEAD_INIT,
1393 "osutil",
1247 "osutil",
@@ -1406,14 +1260,3 b' PyMODINIT_FUNC PyInit_osutil(void)'
1406 PyModule_AddIntConstant(m, "version", version);
1260 PyModule_AddIntConstant(m, "version", version);
1407 return m;
1261 return m;
1408 }
1262 }
1409 #else
1410 PyMODINIT_FUNC initosutil(void)
1411 {
1412 PyObject *m;
1413 if (PyType_Ready(&listdir_stat_type) == -1)
1414 return;
1415
1416 m = Py_InitModule3("osutil", methods, osutil_doc);
1417 PyModule_AddIntConstant(m, "version", version);
1418 }
1419 #endif
@@ -18,7 +18,6 b' class stat:'
18 def listdir(path: bytes, st: bool, skip: bool) -> List[stat]: ...
18 def listdir(path: bytes, st: bool, skip: bool) -> List[stat]: ...
19 def posixfile(name: AnyStr, mode: bytes, buffering: int) -> IO: ...
19 def posixfile(name: AnyStr, mode: bytes, buffering: int) -> IO: ...
20 def statfiles(names: Sequence[bytes]) -> List[stat]: ...
20 def statfiles(names: Sequence[bytes]) -> List[stat]: ...
21 def recvfds(sockfd: int) -> List[int]: ...
22 def setprocname(name: bytes) -> None: ...
21 def setprocname(name: bytes) -> None: ...
23 def getfstype(path: bytes) -> bytes: ...
22 def getfstype(path: bytes) -> bytes: ...
24 def getfsmountpoint(path: bytes) -> bytes: ...
23 def getfsmountpoint(path: bytes) -> bytes: ...
@@ -17,22 +17,6 b''
17 #include "charencode.h"
17 #include "charencode.h"
18 #include "util.h"
18 #include "util.h"
19
19
20 #ifdef IS_PY3K
21 /* The mapping of Python types is meant to be temporary to get Python
22 * 3 to compile. We should remove this once Python 3 support is fully
23 * supported and proper types are used in the extensions themselves. */
24 #define PyInt_Check PyLong_Check
25 #define PyInt_FromLong PyLong_FromLong
26 #define PyInt_FromSsize_t PyLong_FromSsize_t
27 #define PyInt_AsLong PyLong_AsLong
28 #else
29 /* Windows on Python 2.7 doesn't define S_IFLNK. Python 3+ defines via
30 * pyport.h. */
31 #ifndef S_IFLNK
32 #define S_IFLNK 0120000
33 #endif
34 #endif
35
36 static const char *const versionerrortext = "Python minor version mismatch";
20 static const char *const versionerrortext = "Python minor version mismatch";
37
21
38 static const int dirstate_v1_from_p2 = -2;
22 static const int dirstate_v1_from_p2 = -2;
@@ -305,27 +289,6 b' static PyObject *dirstate_item_v2_data(d'
305 self->mtime_ns);
289 self->mtime_ns);
306 };
290 };
307
291
308 static PyObject *dirstate_item_v1_state(dirstateItemObject *self)
309 {
310 char state = dirstate_item_c_v1_state(self);
311 return PyBytes_FromStringAndSize(&state, 1);
312 };
313
314 static PyObject *dirstate_item_v1_mode(dirstateItemObject *self)
315 {
316 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
317 };
318
319 static PyObject *dirstate_item_v1_size(dirstateItemObject *self)
320 {
321 return PyInt_FromLong(dirstate_item_c_v1_size(self));
322 };
323
324 static PyObject *dirstate_item_v1_mtime(dirstateItemObject *self)
325 {
326 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
327 };
328
329 static PyObject *dirstate_item_mtime_likely_equal_to(dirstateItemObject *self,
292 static PyObject *dirstate_item_mtime_likely_equal_to(dirstateItemObject *self,
330 PyObject *other)
293 PyObject *other)
331 {
294 {
@@ -411,7 +374,7 b' dirstate_item_from_v1_data(char state, i'
411 } else {
374 } else {
412 PyErr_Format(PyExc_RuntimeError,
375 PyErr_Format(PyExc_RuntimeError,
413 "unknown state: `%c` (%d, %d, %d)", state, mode,
376 "unknown state: `%c` (%d, %d, %d)", state, mode,
414 size, mtime, NULL);
377 size, mtime);
415 Py_DECREF(t);
378 Py_DECREF(t);
416 return NULL;
379 return NULL;
417 }
380 }
@@ -419,20 +382,6 b' dirstate_item_from_v1_data(char state, i'
419 return t;
382 return t;
420 }
383 }
421
384
422 /* This will never change since it's bound to V1, unlike `dirstate_item_new` */
423 static PyObject *dirstate_item_from_v1_meth(PyTypeObject *subtype,
424 PyObject *args)
425 {
426 /* We do all the initialization here and not a tp_init function because
427 * dirstate_item is immutable. */
428 char state;
429 int size, mode, mtime;
430 if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) {
431 return NULL;
432 }
433 return (PyObject *)dirstate_item_from_v1_data(state, mode, size, mtime);
434 };
435
436 static PyObject *dirstate_item_from_v2_meth(PyTypeObject *subtype,
385 static PyObject *dirstate_item_from_v2_meth(PyTypeObject *subtype,
437 PyObject *args)
386 PyObject *args)
438 {
387 {
@@ -542,18 +491,8 b' static PyObject *dirstate_item_drop_merg'
542 static PyMethodDef dirstate_item_methods[] = {
491 static PyMethodDef dirstate_item_methods[] = {
543 {"v2_data", (PyCFunction)dirstate_item_v2_data, METH_NOARGS,
492 {"v2_data", (PyCFunction)dirstate_item_v2_data, METH_NOARGS,
544 "return data suitable for v2 serialization"},
493 "return data suitable for v2 serialization"},
545 {"v1_state", (PyCFunction)dirstate_item_v1_state, METH_NOARGS,
546 "return a \"state\" suitable for v1 serialization"},
547 {"v1_mode", (PyCFunction)dirstate_item_v1_mode, METH_NOARGS,
548 "return a \"mode\" suitable for v1 serialization"},
549 {"v1_size", (PyCFunction)dirstate_item_v1_size, METH_NOARGS,
550 "return a \"size\" suitable for v1 serialization"},
551 {"v1_mtime", (PyCFunction)dirstate_item_v1_mtime, METH_NOARGS,
552 "return a \"mtime\" suitable for v1 serialization"},
553 {"mtime_likely_equal_to", (PyCFunction)dirstate_item_mtime_likely_equal_to,
494 {"mtime_likely_equal_to", (PyCFunction)dirstate_item_mtime_likely_equal_to,
554 METH_O, "True if the stored mtime is likely equal to the given mtime"},
495 METH_O, "True if the stored mtime is likely equal to the given mtime"},
555 {"from_v1_data", (PyCFunction)dirstate_item_from_v1_meth,
556 METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V1 data"},
557 {"from_v2_data", (PyCFunction)dirstate_item_from_v2_meth,
496 {"from_v2_data", (PyCFunction)dirstate_item_from_v2_meth,
558 METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V2 data"},
497 METH_VARARGS | METH_CLASS, "build a new DirstateItem object from V2 data"},
559 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
498 {"set_possibly_dirty", (PyCFunction)dirstate_item_set_possibly_dirty,
@@ -571,17 +510,17 b' static PyMethodDef dirstate_item_methods'
571
510
572 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
511 static PyObject *dirstate_item_get_mode(dirstateItemObject *self)
573 {
512 {
574 return PyInt_FromLong(dirstate_item_c_v1_mode(self));
513 return PyLong_FromLong(dirstate_item_c_v1_mode(self));
575 };
514 };
576
515
577 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
516 static PyObject *dirstate_item_get_size(dirstateItemObject *self)
578 {
517 {
579 return PyInt_FromLong(dirstate_item_c_v1_size(self));
518 return PyLong_FromLong(dirstate_item_c_v1_size(self));
580 };
519 };
581
520
582 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
521 static PyObject *dirstate_item_get_mtime(dirstateItemObject *self)
583 {
522 {
584 return PyInt_FromLong(dirstate_item_c_v1_mtime(self));
523 return PyLong_FromLong(dirstate_item_c_v1_mtime(self));
585 };
524 };
586
525
587 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
526 static PyObject *dirstate_item_get_state(dirstateItemObject *self)
@@ -831,9 +770,8 b' static PyObject *parse_dirstate(PyObject'
831 Py_ssize_t len = 40;
770 Py_ssize_t len = 40;
832 Py_ssize_t readlen;
771 Py_ssize_t readlen;
833
772
834 if (!PyArg_ParseTuple(
773 if (!PyArg_ParseTuple(args, "O!O!y#:parse_dirstate", &PyDict_Type,
835 args, PY23("O!O!s#:parse_dirstate", "O!O!y#:parse_dirstate"),
774 &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
836 &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) {
837 goto quit;
775 goto quit;
838 }
776 }
839
777
@@ -846,8 +784,8 b' static PyObject *parse_dirstate(PyObject'
846 goto quit;
784 goto quit;
847 }
785 }
848
786
849 parents = Py_BuildValue(PY23("s#s#", "y#y#"), str, (Py_ssize_t)20,
787 parents = Py_BuildValue("y#y#", str, (Py_ssize_t)20, str + 20,
850 str + 20, (Py_ssize_t)20);
788 (Py_ssize_t)20);
851 if (!parents) {
789 if (!parents) {
852 goto quit;
790 goto quit;
853 }
791 }
@@ -1176,8 +1114,7 b' static PyObject *fm1readmarkers(PyObject'
1176 Py_ssize_t datalen, offset, stop;
1114 Py_ssize_t datalen, offset, stop;
1177 PyObject *markers = NULL;
1115 PyObject *markers = NULL;
1178
1116
1179 if (!PyArg_ParseTuple(args, PY23("s#nn", "y#nn"), &data, &datalen,
1117 if (!PyArg_ParseTuple(args, "y#nn", &data, &datalen, &offset, &stop)) {
1180 &offset, &stop)) {
1181 return NULL;
1118 return NULL;
1182 }
1119 }
1183 if (offset < 0) {
1120 if (offset < 0) {
@@ -1289,7 +1226,7 b' static int check_python_version(void)'
1289 if (!ver) {
1226 if (!ver) {
1290 return -1;
1227 return -1;
1291 }
1228 }
1292 hexversion = PyInt_AsLong(ver);
1229 hexversion = PyLong_AsLong(ver);
1293 Py_DECREF(ver);
1230 Py_DECREF(ver);
1294 /* sys.hexversion is a 32-bit number by default, so the -1 case
1231 /* sys.hexversion is a 32-bit number by default, so the -1 case
1295 * should only occur in unusual circumstances (e.g. if sys.hexversion
1232 * should only occur in unusual circumstances (e.g. if sys.hexversion
@@ -1309,7 +1246,6 b' static int check_python_version(void)'
1309 return 0;
1246 return 0;
1310 }
1247 }
1311
1248
1312 #ifdef IS_PY3K
1313 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
1249 static struct PyModuleDef parsers_module = {PyModuleDef_HEAD_INIT, "parsers",
1314 parsers_doc, -1, methods};
1250 parsers_doc, -1, methods};
1315
1251
@@ -1323,15 +1259,3 b' PyMODINIT_FUNC PyInit_parsers(void)'
1323 module_init(mod);
1259 module_init(mod);
1324 return mod;
1260 return mod;
1325 }
1261 }
1326 #else
1327 PyMODINIT_FUNC initparsers(void)
1328 {
1329 PyObject *mod;
1330
1331 if (check_python_version() == -1) {
1332 return;
1333 }
1334 mod = Py_InitModule3("parsers", methods, parsers_doc);
1335 module_init(mod);
1336 }
1337 #endif
@@ -535,8 +535,7 b' PyObject *lowerencode(PyObject *self, Py'
535 Py_ssize_t len, newlen;
535 Py_ssize_t len, newlen;
536 PyObject *ret;
536 PyObject *ret;
537
537
538 if (!PyArg_ParseTuple(args, PY23("s#:lowerencode", "y#:lowerencode"),
538 if (!PyArg_ParseTuple(args, "y#:lowerencode", &path, &len)) {
539 &path, &len)) {
540 return NULL;
539 return NULL;
541 }
540 }
542
541
@@ -711,7 +710,7 b' static int sha1hash(char hash[20], const'
711 }
710 }
712 }
711 }
713
712
714 shaobj = PyObject_CallFunction(shafunc, PY23("s#", "y#"), str, len);
713 shaobj = PyObject_CallFunction(shafunc, "y#", str, len);
715
714
716 if (shaobj == NULL) {
715 if (shaobj == NULL) {
717 return -1;
716 return -1;
@@ -23,16 +23,6 b''
23 #include "revlog.h"
23 #include "revlog.h"
24 #include "util.h"
24 #include "util.h"
25
25
26 #ifdef IS_PY3K
27 /* The mapping of Python types is meant to be temporary to get Python
28 * 3 to compile. We should remove this once Python 3 support is fully
29 * supported and proper types are used in the extensions themselves. */
30 #define PyInt_Check PyLong_Check
31 #define PyInt_FromLong PyLong_FromLong
32 #define PyInt_FromSsize_t PyLong_FromSsize_t
33 #define PyInt_AsLong PyLong_AsLong
34 #endif
35
36 typedef struct indexObjectStruct indexObject;
26 typedef struct indexObjectStruct indexObject;
37
27
38 typedef struct {
28 typedef struct {
@@ -43,6 +33,7 b' typedef struct {'
43 int abi_version;
33 int abi_version;
44 Py_ssize_t (*index_length)(const indexObject *);
34 Py_ssize_t (*index_length)(const indexObject *);
45 const char *(*index_node)(indexObject *, Py_ssize_t);
35 const char *(*index_node)(indexObject *, Py_ssize_t);
36 int (*fast_rank)(indexObject *, Py_ssize_t);
46 int (*index_parents)(PyObject *, int, int *);
37 int (*index_parents)(PyObject *, int, int *);
47 } Revlog_CAPI;
38 } Revlog_CAPI;
48
39
@@ -119,11 +110,9 b' static Py_ssize_t inline_scan(indexObjec'
119 static int index_find_node(indexObject *self, const char *node);
110 static int index_find_node(indexObject *self, const char *node);
120
111
121 #if LONG_MAX == 0x7fffffffL
112 #if LONG_MAX == 0x7fffffffL
122 static const char *const tuple_format =
113 static const char *const tuple_format = "Kiiiiiiy#KiBBi";
123 PY23("Kiiiiiis#KiBBi", "Kiiiiiiy#KiBBi");
124 #else
114 #else
125 static const char *const tuple_format =
115 static const char *const tuple_format = "kiiiiiiy#kiBBi";
126 PY23("kiiiiiis#kiBBi", "kiiiiiiy#kiBBi");
127 #endif
116 #endif
128
117
129 /* A RevlogNG v1 index entry is 64 bytes long. */
118 /* A RevlogNG v1 index entry is 64 bytes long. */
@@ -502,13 +491,13 b' static PyObject *index_pack_header(index'
502 {
491 {
503 int header;
492 int header;
504 char out[4];
493 char out[4];
505 if (!PyArg_ParseTuple(args, "I", &header)) {
494 if (!PyArg_ParseTuple(args, "i", &header)) {
506 return NULL;
495 return NULL;
507 }
496 }
508 if (self->format_version != format_v1) {
497 if (self->format_version != format_v1) {
509 PyErr_Format(PyExc_RuntimeError,
498 PyErr_Format(PyExc_RuntimeError,
510 "version header should go in the docket, not the "
499 "version header should go in the docket, not the "
511 "index: %lu",
500 "index: %d",
512 header);
501 header);
513 return NULL;
502 return NULL;
514 }
503 }
@@ -576,6 +565,33 b' static const char *index_node(indexObjec'
576 }
565 }
577
566
578 /*
567 /*
568 * Return the stored rank of a given revision if known, or rank_unknown
569 * otherwise.
570 *
571 * The rank of a revision is the size of the sub-graph it defines as a head.
572 * Equivalently, the rank of a revision `r` is the size of the set
573 * `ancestors(r)`, `r` included.
574 *
575 * This method returns the rank retrieved from the revlog in constant time. It
576 * makes no attempt at computing unknown values for versions of the revlog
577 * which do not persist the rank.
578 */
579 static int index_fast_rank(indexObject *self, Py_ssize_t pos)
580 {
581 Py_ssize_t length = index_length(self);
582
583 if (self->format_version != format_cl2 || pos >= length) {
584 return rank_unknown;
585 }
586
587 if (pos == nullrev) {
588 return 0; /* convention */
589 }
590
591 return getbe32(index_deref(self, pos) + entry_cl2_offset_rank);
592 }
593
594 /*
579 * Return the hash of the node corresponding to the given rev. The
595 * Return the hash of the node corresponding to the given rev. The
580 * rev is assumed to be existing. If not, an exception is set.
596 * rev is assumed to be existing. If not, an exception is set.
581 */
597 */
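
The new index_fast_rank() above only reads a rank that changelog-v2 entries already persist; it never computes one. As a conceptual aside (plain Python, not Mercurial code; the parents() callable is a stand-in), the stored value corresponds to the size of the ancestor set of the revision, the revision itself included:

    def rank(parents, r):
        # Size of the sub-graph headed by r: ancestors(r) with r included.
        # nullrev (-1) contributes nothing, matching the convention above.
        seen = set()
        stack = [r]
        while stack:
            rev = stack.pop()
            if rev < 0 or rev in seen:
                continue
            seen.add(rev)
            stack.extend(parents(rev))
        return len(seen)

Persisting the value is what lets index_fast_rank() answer in constant time instead of walking the graph as this sketch does.
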
@@ -730,9 +746,9 b' static PyObject *index_replace_sidedata_'
730 char comp_mode;
746 char comp_mode;
731 char *data;
747 char *data;
732 #if LONG_MAX == 0x7fffffffL
748 #if LONG_MAX == 0x7fffffffL
733 const char *const sidedata_format = PY23("nKiKB", "nKiKB");
749 const char *const sidedata_format = "nKiKB";
734 #else
750 #else
735 const char *const sidedata_format = PY23("nkikB", "nkikB");
751 const char *const sidedata_format = "nkikB";
736 #endif
752 #endif
737
753
738 if (self->entry_size == v1_entry_size || self->inlined) {
754 if (self->entry_size == v1_entry_size || self->inlined) {
@@ -802,7 +818,7 b' static PyObject *index_stats(indexObject'
802 #define istat(__n, __d) \
818 #define istat(__n, __d) \
803 do { \
819 do { \
804 s = PyBytes_FromString(__d); \
820 s = PyBytes_FromString(__d); \
805 t = PyInt_FromSsize_t(self->__n); \
821 t = PyLong_FromSsize_t(self->__n); \
806 if (!s || !t) \
822 if (!s || !t) \
807 goto bail; \
823 goto bail; \
808 if (PyDict_SetItem(obj, s, t) == -1) \
824 if (PyDict_SetItem(obj, s, t) == -1) \
@@ -953,7 +969,7 b' static PyObject *reachableroots2(indexOb'
953
969
954 l = PyList_GET_SIZE(roots);
970 l = PyList_GET_SIZE(roots);
955 for (i = 0; i < l; i++) {
971 for (i = 0; i < l; i++) {
956 revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i));
972 revnum = PyLong_AsLong(PyList_GET_ITEM(roots, i));
957 if (revnum == -1 && PyErr_Occurred())
973 if (revnum == -1 && PyErr_Occurred())
958 goto bail;
974 goto bail;
959 /* If root is out of range, e.g. wdir(), it must be unreachable
975 /* If root is out of range, e.g. wdir(), it must be unreachable
@@ -966,7 +982,7 b' static PyObject *reachableroots2(indexOb'
966 /* Populate tovisit with all the heads */
982 /* Populate tovisit with all the heads */
967 l = PyList_GET_SIZE(heads);
983 l = PyList_GET_SIZE(heads);
968 for (i = 0; i < l; i++) {
984 for (i = 0; i < l; i++) {
969 revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i));
985 revnum = PyLong_AsLong(PyList_GET_ITEM(heads, i));
970 if (revnum == -1 && PyErr_Occurred())
986 if (revnum == -1 && PyErr_Occurred())
971 goto bail;
987 goto bail;
972 if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
988 if (revnum + 1 < 0 || revnum + 1 >= len + 1) {
@@ -986,7 +1002,7 b' static PyObject *reachableroots2(indexOb'
986 revnum = tovisit[k++];
1002 revnum = tovisit[k++];
987 if (revstates[revnum + 1] & RS_ROOT) {
1003 if (revstates[revnum + 1] & RS_ROOT) {
988 revstates[revnum + 1] |= RS_REACHABLE;
1004 revstates[revnum + 1] |= RS_REACHABLE;
989 val = PyInt_FromLong(revnum);
1005 val = PyLong_FromLong(revnum);
990 if (val == NULL)
1006 if (val == NULL)
991 goto bail;
1007 goto bail;
992 r = PyList_Append(reachable, val);
1008 r = PyList_Append(reachable, val);
@@ -1031,7 +1047,7 b' static PyObject *reachableroots2(indexOb'
1031 RS_REACHABLE) &&
1047 RS_REACHABLE) &&
1032 !(revstates[i + 1] & RS_REACHABLE)) {
1048 !(revstates[i + 1] & RS_REACHABLE)) {
1033 revstates[i + 1] |= RS_REACHABLE;
1049 revstates[i + 1] |= RS_REACHABLE;
1034 val = PyInt_FromSsize_t(i);
1050 val = PyLong_FromSsize_t(i);
1035 if (val == NULL)
1051 if (val == NULL)
1036 goto bail;
1052 goto bail;
1037 r = PyList_Append(reachable, val);
1053 r = PyList_Append(reachable, val);
@@ -1116,7 +1132,7 b' static PyObject *compute_phases_map_sets'
1116 }
1132 }
1117
1133
1118 for (i = 0; i < numphases; ++i) {
1134 for (i = 0; i < numphases; ++i) {
1119 PyObject *pyphase = PyInt_FromLong(trackedphases[i]);
1135 PyObject *pyphase = PyLong_FromLong(trackedphases[i]);
1120 PyObject *phaseroots = NULL;
1136 PyObject *phaseroots = NULL;
1121 if (pyphase == NULL)
1137 if (pyphase == NULL)
1122 goto release;
1138 goto release;
@@ -1175,7 +1191,7 b' static PyObject *compute_phases_map_sets'
1175 "bad phase number in internal list");
1191 "bad phase number in internal list");
1176 goto release;
1192 goto release;
1177 }
1193 }
1178 pyrev = PyInt_FromLong(rev);
1194 pyrev = PyLong_FromLong(rev);
1179 if (pyrev == NULL)
1195 if (pyrev == NULL)
1180 goto release;
1196 goto release;
1181 if (PySet_Add(pyphase, pyrev) == -1) {
1197 if (PySet_Add(pyphase, pyrev) == -1) {
@@ -1189,7 +1205,7 b' static PyObject *compute_phases_map_sets'
1189 if (phasesetsdict == NULL)
1205 if (phasesetsdict == NULL)
1190 goto release;
1206 goto release;
1191 for (i = 0; i < numphases; ++i) {
1207 for (i = 0; i < numphases; ++i) {
1192 PyObject *pyphase = PyInt_FromLong(trackedphases[i]);
1208 PyObject *pyphase = PyLong_FromLong(trackedphases[i]);
1193 if (pyphase == NULL)
1209 if (pyphase == NULL)
1194 goto release;
1210 goto release;
1195 if (PyDict_SetItem(phasesetsdict, pyphase, phasesets[i]) ==
1211 if (PyDict_SetItem(phasesetsdict, pyphase, phasesets[i]) ==
@@ -1247,7 +1263,7 b' static PyObject *index_headrevs(indexObj'
1247 if (heads == NULL)
1263 if (heads == NULL)
1248 goto bail;
1264 goto bail;
1249 if (len == 0) {
1265 if (len == 0) {
1250 PyObject *nullid = PyInt_FromLong(-1);
1266 PyObject *nullid = PyLong_FromLong(-1);
1251 if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
1267 if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
1252 Py_XDECREF(nullid);
1268 Py_XDECREF(nullid);
1253 goto bail;
1269 goto bail;
@@ -1296,7 +1312,7 b' static PyObject *index_headrevs(indexObj'
1296
1312
1297 if (nothead[i])
1313 if (nothead[i])
1298 continue;
1314 continue;
1299 head = PyInt_FromSsize_t(i);
1315 head = PyLong_FromSsize_t(i);
1300 if (head == NULL || PyList_Append(heads, head) == -1) {
1316 if (head == NULL || PyList_Append(heads, head) == -1) {
1301 Py_XDECREF(head);
1317 Py_XDECREF(head);
1302 goto bail;
1318 goto bail;
@@ -1442,7 +1458,7 b' static PyObject *index_findsnapshots(ind'
1442 assert(PyErr_Occurred());
1458 assert(PyErr_Occurred());
1443 goto bail;
1459 goto bail;
1444 }
1460 }
1445 key = PyInt_FromSsize_t(base);
1461 key = PyLong_FromSsize_t(base);
1446 allvalues = PyDict_GetItem(cache, key);
1462 allvalues = PyDict_GetItem(cache, key);
1447 if (allvalues == NULL && PyErr_Occurred()) {
1463 if (allvalues == NULL && PyErr_Occurred()) {
1448 goto bail;
1464 goto bail;
@@ -1459,7 +1475,7 b' static PyObject *index_findsnapshots(ind'
1459 goto bail;
1475 goto bail;
1460 }
1476 }
1461 }
1477 }
1462 value = PyInt_FromSsize_t(rev);
1478 value = PyLong_FromSsize_t(rev);
1463 if (PyList_Append(allvalues, value)) {
1479 if (PyList_Append(allvalues, value)) {
1464 goto bail;
1480 goto bail;
1465 }
1481 }
@@ -1486,8 +1502,8 b' static PyObject *index_deltachain(indexO'
1486 return NULL;
1502 return NULL;
1487 }
1503 }
1488
1504
1489 if (PyInt_Check(stoparg)) {
1505 if (PyLong_Check(stoparg)) {
1490 stoprev = (int)PyInt_AsLong(stoparg);
1506 stoprev = (int)PyLong_AsLong(stoparg);
1491 if (stoprev == -1 && PyErr_Occurred()) {
1507 if (stoprev == -1 && PyErr_Occurred()) {
1492 return NULL;
1508 return NULL;
1493 }
1509 }
@@ -1521,7 +1537,7 b' static PyObject *index_deltachain(indexO'
1521 iterrev = rev;
1537 iterrev = rev;
1522
1538
1523 while (iterrev != baserev && iterrev != stoprev) {
1539 while (iterrev != baserev && iterrev != stoprev) {
1524 PyObject *value = PyInt_FromLong(iterrev);
1540 PyObject *value = PyLong_FromLong(iterrev);
1525 if (value == NULL) {
1541 if (value == NULL) {
1526 goto bail;
1542 goto bail;
1527 }
1543 }
@@ -1560,7 +1576,7 b' static PyObject *index_deltachain(indexO'
1560 if (iterrev == stoprev) {
1576 if (iterrev == stoprev) {
1561 stopped = 1;
1577 stopped = 1;
1562 } else {
1578 } else {
1563 PyObject *value = PyInt_FromLong(iterrev);
1579 PyObject *value = PyLong_FromLong(iterrev);
1564 if (value == NULL) {
1580 if (value == NULL) {
1565 goto bail;
1581 goto bail;
1566 }
1582 }
@@ -1712,7 +1728,8 b' static PyObject *index_slicechunktodensi'
1712 goto bail;
1728 goto bail;
1713 }
1729 }
1714 for (i = 0; i < num_revs; i++) {
1730 for (i = 0; i < num_revs; i++) {
1715 Py_ssize_t revnum = PyInt_AsLong(PyList_GET_ITEM(list_revs, i));
1731 Py_ssize_t revnum =
1732 PyLong_AsLong(PyList_GET_ITEM(list_revs, i));
1716 if (revnum == -1 && PyErr_Occurred()) {
1733 if (revnum == -1 && PyErr_Occurred()) {
1717 goto bail;
1734 goto bail;
1718 }
1735 }
@@ -2118,7 +2135,7 b' static PyObject *ntobj_shortest(nodetree'
2118 raise_revlog_error();
2135 raise_revlog_error();
2119 return NULL;
2136 return NULL;
2120 }
2137 }
2121 return PyInt_FromLong(length);
2138 return PyLong_FromLong(length);
2122 }
2139 }
2123
2140
2124 static void nt_dealloc(nodetree *self)
2141 static void nt_dealloc(nodetree *self)
@@ -2266,7 +2283,7 b' static PyObject *index_getitem(indexObje'
2266 char *node;
2283 char *node;
2267 int rev;
2284 int rev;
2268
2285
2269 if (PyInt_Check(value)) {
2286 if (PyLong_Check(value)) {
2270 long idx;
2287 long idx;
2271 if (!pylong_to_long(value, &idx)) {
2288 if (!pylong_to_long(value, &idx)) {
2272 return NULL;
2289 return NULL;
@@ -2278,7 +2295,7 b' static PyObject *index_getitem(indexObje'
2278 return NULL;
2295 return NULL;
2279 rev = index_find_node(self, node);
2296 rev = index_find_node(self, node);
2280 if (rev >= -1)
2297 if (rev >= -1)
2281 return PyInt_FromLong(rev);
2298 return PyLong_FromLong(rev);
2282 if (rev == -2)
2299 if (rev == -2)
2283 raise_revlog_error();
2300 raise_revlog_error();
2284 return NULL;
2301 return NULL;
@@ -2310,7 +2327,7 b' static PyObject *index_partialmatch(inde'
2310 char *node;
2327 char *node;
2311 int rev, i;
2328 int rev, i;
2312
2329
2313 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
2330 if (!PyArg_ParseTuple(args, "y#", &node, &nodelen))
2314 return NULL;
2331 return NULL;
2315
2332
2316 if (nodelen < 1) {
2333 if (nodelen < 1) {
@@ -2377,7 +2394,7 b' static PyObject *index_shortest(indexObj'
2377 raise_revlog_error();
2394 raise_revlog_error();
2378 return NULL;
2395 return NULL;
2379 }
2396 }
2380 return PyInt_FromLong(length);
2397 return PyLong_FromLong(length);
2381 }
2398 }
2382
2399
2383 static PyObject *index_m_get(indexObject *self, PyObject *args)
2400 static PyObject *index_m_get(indexObject *self, PyObject *args)
@@ -2395,14 +2412,14 b' static PyObject *index_m_get(indexObject'
2395 return NULL;
2412 return NULL;
2396 if (rev == -2)
2413 if (rev == -2)
2397 Py_RETURN_NONE;
2414 Py_RETURN_NONE;
2398 return PyInt_FromLong(rev);
2415 return PyLong_FromLong(rev);
2399 }
2416 }
2400
2417
2401 static int index_contains(indexObject *self, PyObject *value)
2418 static int index_contains(indexObject *self, PyObject *value)
2402 {
2419 {
2403 char *node;
2420 char *node;
2404
2421
2405 if (PyInt_Check(value)) {
2422 if (PyLong_Check(value)) {
2406 long rev;
2423 long rev;
2407 if (!pylong_to_long(value, &rev)) {
2424 if (!pylong_to_long(value, &rev)) {
2408 return -1;
2425 return -1;
@@ -2440,7 +2457,7 b' static PyObject *index_m_rev(indexObject'
2440 return NULL;
2457 return NULL;
2441 rev = index_find_node(self, node);
2458 rev = index_find_node(self, node);
2442 if (rev >= -1)
2459 if (rev >= -1)
2443 return PyInt_FromLong(rev);
2460 return PyLong_FromLong(rev);
2444 if (rev == -2)
2461 if (rev == -2)
2445 raise_revlog_error();
2462 raise_revlog_error();
2446 return NULL;
2463 return NULL;
@@ -2493,7 +2510,7 b' static PyObject *find_gca_candidates(ind'
2493 if (sv < poison) {
2510 if (sv < poison) {
2494 interesting -= 1;
2511 interesting -= 1;
2495 if (sv == allseen) {
2512 if (sv == allseen) {
2496 PyObject *obj = PyInt_FromLong(v);
2513 PyObject *obj = PyLong_FromLong(v);
2497 if (obj == NULL)
2514 if (obj == NULL)
2498 goto bail;
2515 goto bail;
2499 if (PyList_Append(gca, obj) == -1) {
2516 if (PyList_Append(gca, obj) == -1) {
@@ -2561,7 +2578,7 b' static PyObject *find_deepest(indexObjec'
2561 }
2578 }
2562
2579
2563 for (i = 0; i < revcount; i++) {
2580 for (i = 0; i < revcount; i++) {
2564 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
2581 int n = (int)PyLong_AsLong(PyList_GET_ITEM(revs, i));
2565 if (n > maxrev)
2582 if (n > maxrev)
2566 maxrev = n;
2583 maxrev = n;
2567 }
2584 }
@@ -2586,7 +2603,7 b' static PyObject *find_deepest(indexObjec'
2586 goto bail;
2603 goto bail;
2587
2604
2588 for (i = 0; i < revcount; i++) {
2605 for (i = 0; i < revcount; i++) {
2589 int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i));
2606 int n = (int)PyLong_AsLong(PyList_GET_ITEM(revs, i));
2590 long b = 1l << i;
2607 long b = 1l << i;
2591 depth[n] = 1;
2608 depth[n] = 1;
2592 seen[n] = b;
2609 seen[n] = b;
@@ -2716,13 +2733,13 b' static PyObject *index_commonancestorshe'
2716 bitmask x;
2733 bitmask x;
2717 long val;
2734 long val;
2718
2735
2719 if (!PyInt_Check(obj)) {
2736 if (!PyLong_Check(obj)) {
2720 PyErr_SetString(PyExc_TypeError,
2737 PyErr_SetString(PyExc_TypeError,
2721 "arguments must all be ints");
2738 "arguments must all be ints");
2722 Py_DECREF(obj);
2739 Py_DECREF(obj);
2723 goto bail;
2740 goto bail;
2724 }
2741 }
2725 val = PyInt_AsLong(obj);
2742 val = PyLong_AsLong(obj);
2726 Py_DECREF(obj);
2743 Py_DECREF(obj);
2727 if (val == -1) {
2744 if (val == -1) {
2728 ret = PyList_New(0);
2745 ret = PyList_New(0);
@@ -2763,7 +2780,7 b' static PyObject *index_commonancestorshe'
2763 ret = PyList_New(1);
2780 ret = PyList_New(1);
2764 if (ret == NULL)
2781 if (ret == NULL)
2765 goto bail;
2782 goto bail;
2766 obj = PyInt_FromLong(revs[0]);
2783 obj = PyLong_FromLong(revs[0]);
2767 if (obj == NULL)
2784 if (obj == NULL)
2768 goto bail;
2785 goto bail;
2769 PyList_SET_ITEM(ret, 0, obj);
2786 PyList_SET_ITEM(ret, 0, obj);
@@ -2834,14 +2851,8 b' static int index_slice_del(indexObject *'
2834 Py_ssize_t length = index_length(self) + 1;
2851 Py_ssize_t length = index_length(self) + 1;
2835 int ret = 0;
2852 int ret = 0;
2836
2853
2837 /* Argument changed from PySliceObject* to PyObject* in Python 3. */
2838 #ifdef IS_PY3K
2839 if (PySlice_GetIndicesEx(item, length, &start, &stop, &step,
2854 if (PySlice_GetIndicesEx(item, length, &start, &stop, &step,
2840 &slicelength) < 0)
2855 &slicelength) < 0)
2841 #else
2842 if (PySlice_GetIndicesEx((PySliceObject *)item, length, &start, &stop,
2843 &step, &slicelength) < 0)
2844 #endif
2845 return -1;
2856 return -1;
2846
2857
2847 if (slicelength <= 0)
2858 if (slicelength <= 0)
@@ -2925,7 +2936,7 b' static int index_assign_subscript(indexO'
2925 if (value == NULL)
2936 if (value == NULL)
2926 return self->ntinitialized ? nt_delete_node(&self->nt, node)
2937 return self->ntinitialized ? nt_delete_node(&self->nt, node)
2927 : 0;
2938 : 0;
2928 rev = PyInt_AsLong(value);
2939 rev = PyLong_AsLong(value);
2929 if (rev > INT_MAX || rev < 0) {
2940 if (rev > INT_MAX || rev < 0) {
2930 if (!PyErr_Occurred())
2941 if (!PyErr_Occurred())
2931 PyErr_SetString(PyExc_ValueError, "rev out of range");
2942 PyErr_SetString(PyExc_ValueError, "rev out of range");
@@ -3027,10 +3038,9 b' static int index_init(indexObject *self,'
3027 self->entry_size = cl2_entry_size;
3038 self->entry_size = cl2_entry_size;
3028 }
3039 }
3029
3040
3030 self->nullentry =
3041 self->nullentry = Py_BuildValue(
3031 Py_BuildValue(PY23("iiiiiiis#iiBBi", "iiiiiiiy#iiBBi"), 0, 0, 0, -1,
3042 "iiiiiiiy#iiBBi", 0, 0, 0, -1, -1, -1, -1, nullid, self->nodelen, 0,
3032 -1, -1, -1, nullid, self->nodelen, 0, 0,
3043 0, comp_mode_inline, comp_mode_inline, rank_unknown);
3033 comp_mode_inline, comp_mode_inline, rank_unknown);
3034
3044
3035 if (!self->nullentry)
3045 if (!self->nullentry)
3036 return -1;
3046 return -1;
@@ -3266,10 +3276,7 b' bail:'
3266 static Revlog_CAPI CAPI = {
3276 static Revlog_CAPI CAPI = {
3267 /* increment the abi_version field upon each change in the Revlog_CAPI
3277 /* increment the abi_version field upon each change in the Revlog_CAPI
3268 struct or in the ABI of the listed functions */
3278 struct or in the ABI of the listed functions */
3269 2,
3279 3, index_length, index_node, index_fast_rank, HgRevlogIndex_GetParents,
3270 index_length,
3271 index_node,
3272 HgRevlogIndex_GetParents,
3273 };
3280 };
3274
3281
3275 void revlog_module_init(PyObject *mod)
3282 void revlog_module_init(PyObject *mod)
@@ -10,17 +10,6 b''
10
10
11 #include "compat.h"
11 #include "compat.h"
12
12
13 #if PY_MAJOR_VERSION >= 3
14 #define IS_PY3K
15 #endif
16
17 /* helper to switch things like string literal depending on Python version */
18 #ifdef IS_PY3K
19 #define PY23(py2, py3) py3
20 #else
21 #define PY23(py2, py3) py2
22 #endif
23
24 /* clang-format off */
13 /* clang-format off */
25 typedef struct {
14 typedef struct {
26 PyObject_HEAD
15 PyObject_HEAD
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import struct
9 import struct
11
10
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import cffi
1 import cffi
4 import os
2 import os
5
3
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from ..pure.mpatch import *
9 from ..pure.mpatch import *
11 from ..pure.mpatch import mpatchError # silence pyflakes
10 from ..pure.mpatch import mpatchError # silence pyflakes
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import cffi
1 import cffi
4 import os
2 import os
5
3
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import os
9 import os
11 import stat as statmod
10 import stat as statmod
@@ -34,7 +33,7 b' if pycompat.isdarwin:'
34 attrkinds[lib.VFIFO] = statmod.S_IFIFO
33 attrkinds[lib.VFIFO] = statmod.S_IFIFO
35 attrkinds[lib.VSOCK] = statmod.S_IFSOCK
34 attrkinds[lib.VSOCK] = statmod.S_IFSOCK
36
35
37 class stat_res(object):
36 class stat_res:
38 def __init__(self, st_mode, st_mtime, st_size):
37 def __init__(self, st_mode, st_mtime, st_size):
39 self.st_mode = st_mode
38 self.st_mode = st_mode
40 self.st_mtime = st_mtime
39 self.st_mtime = st_mtime
@@ -1,5 +1,3 b''
1 from __future__ import absolute_import
2
3 import cffi
1 import cffi
4
2
5 ffi = cffi.FFI()
3 ffi = cffi.FFI()
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import os
9 import os
11 import struct
10 import struct
@@ -106,7 +105,7 b' def writechunks(ui, chunks, filename, vf'
106 os.unlink(cleanup)
105 os.unlink(cleanup)
107
106
108
107
109 class cg1unpacker(object):
108 class cg1unpacker:
110 """Unpacker for cg1 changegroup streams.
109 """Unpacker for cg1 changegroup streams.
111
110
112 A changegroup unpacker handles the framing of the revision data in
111 A changegroup unpacker handles the framing of the revision data in
@@ -421,11 +420,11 b' class cg1unpacker(object):'
421 cl = repo.changelog
420 cl = repo.changelog
422 ml = repo.manifestlog
421 ml = repo.manifestlog
423 # validate incoming csets have their manifests
422 # validate incoming csets have their manifests
424 for cset in pycompat.xrange(clstart, clend):
423 for cset in range(clstart, clend):
425 mfnode = cl.changelogrevision(cset).manifest
424 mfnode = cl.changelogrevision(cset).manifest
426 mfest = ml[mfnode].readdelta()
425 mfest = ml[mfnode].readdelta()
427 # store file nodes we must see
426 # store file nodes we must see
428 for f, n in pycompat.iteritems(mfest):
427 for f, n in mfest.items():
429 needfiles.setdefault(f, set()).add(n)
428 needfiles.setdefault(f, set()).add(n)
430
429
431 on_filelog_rev = None
430 on_filelog_rev = None
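
The Python hunks in this changeset repeat a small set of mechanical translations now that Python 2 support is gone. As a rough summary of the substitutions seen above and below (left-hand names are the old pycompat shims):

    # pycompat.xrange(a, b)        ->  range(a, b)
    # pycompat.iteritems(mapping)  ->  mapping.items()
    # class Foo(object):           ->  class Foo:
    # from __future__ import ...   ->  removed (implicit on Python 3)
    needfiles = {}
    for f, n in {b'a': 1, b'b': 2}.items():   # was pycompat.iteritems(...)
        needfiles.setdefault(f, set()).add(n)
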
@@ -510,7 +509,7 b' class cg1unpacker(object):'
510 **pycompat.strkwargs(hookargs)
509 **pycompat.strkwargs(hookargs)
511 )
510 )
512
511
513 added = pycompat.xrange(clstart, clend)
512 added = range(clstart, clend)
514 phaseall = None
513 phaseall = None
515 if srctype in (b'push', b'serve'):
514 if srctype in (b'push', b'serve'):
516 # Old servers can not push the boundary themselves.
515 # Old servers can not push the boundary themselves.
@@ -692,7 +691,7 b' class cg4unpacker(cg3unpacker):'
692 )
691 )
693
692
694
693
695 class headerlessfixup(object):
694 class headerlessfixup:
696 def __init__(self, fh, h):
695 def __init__(self, fh, h):
697 self._h = h
696 self._h = h
698 self._fh = fh
697 self._fh = fh
@@ -826,7 +825,7 b' def _resolvenarrowrevisioninfo('
826 # somewhat unsurprised to find a case in the wild
825 # somewhat unsurprised to find a case in the wild
827 # where this breaks down a bit. That said, I don't
826 # where this breaks down a bit. That said, I don't
828 # know if it would hurt anything.
827 # know if it would hurt anything.
829 for i in pycompat.xrange(rev, 0, -1):
828 for i in range(rev, 0, -1):
830 if store.linkrev(i) == clrev:
829 if store.linkrev(i) == clrev:
831 return i
830 return i
832 # We failed to resolve a parent for this node, so
831 # We failed to resolve a parent for this node, so
@@ -1004,7 +1003,7 b' def deltagroup('
1004 progress.complete()
1003 progress.complete()
1005
1004
1006
1005
1007 class cgpacker(object):
1006 class cgpacker:
1008 def __init__(
1007 def __init__(
1009 self,
1008 self,
1010 repo,
1009 repo,
@@ -1957,7 +1956,7 b' def _addchangegroupfiles('
1957 revisions += len(fl) - o
1956 revisions += len(fl) - o
1958 if f in needfiles:
1957 if f in needfiles:
1959 needs = needfiles[f]
1958 needs = needfiles[f]
1960 for new in pycompat.xrange(o, len(fl)):
1959 for new in range(o, len(fl)):
1961 n = fl.node(new)
1960 n = fl.node(new)
1962 if n in needs:
1961 if n in needs:
1963 needs.remove(n)
1962 needs.remove(n)
@@ -1967,7 +1966,7 b' def _addchangegroupfiles('
1967 del needfiles[f]
1966 del needfiles[f]
1968 progress.complete()
1967 progress.complete()
1969
1968
1970 for f, needs in pycompat.iteritems(needfiles):
1969 for f, needs in needfiles.items():
1971 fl = repo.file(f)
1970 fl = repo.file(f)
1972 for n in needs:
1971 for n in needs:
1973 try:
1972 try:
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from .i18n import _
9 from .i18n import _
11 from .node import (
10 from .node import (
@@ -92,7 +91,7 b' def stripdesc(desc):'
92 return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
91 return b'\n'.join([l.rstrip() for l in desc.splitlines()]).strip(b'\n')
93
92
94
93
95 class appender(object):
94 class appender:
96 """the changelog index must be updated last on disk, so we use this class
95 """the changelog index must be updated last on disk, so we use this class
97 to delay writes to it"""
96 to delay writes to it"""
98
97
@@ -162,7 +161,7 b' class appender(object):'
162 return self.fp.__exit__(*args)
161 return self.fp.__exit__(*args)
163
162
164
163
165 class _divertopener(object):
164 class _divertopener:
166 def __init__(self, opener, target):
165 def __init__(self, opener, target):
167 self._opener = opener
166 self._opener = opener
168 self._target = target
167 self._target = target
@@ -189,7 +188,7 b' def _delayopener(opener, target, buf):'
189
188
190
189
191 @attr.s
190 @attr.s
192 class _changelogrevision(object):
191 class _changelogrevision:
193 # Extensions might modify _defaultextra, so let the constructor below pass
192 # Extensions might modify _defaultextra, so let the constructor below pass
194 # it in
193 # it in
195 extra = attr.ib()
194 extra = attr.ib()
@@ -205,7 +204,7 b' class _changelogrevision(object):'
205 branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
204 branchinfo = attr.ib(default=(_defaultextra[b'branch'], False))
206
205
207
206
208 class changelogrevision(object):
207 class changelogrevision:
209 """Holds results of a parsed changelog revision.
208 """Holds results of a parsed changelog revision.
210
209
211 Changelog revisions consist of multiple pieces of data, including
210 Changelog revisions consist of multiple pieces of data, including
@@ -39,7 +39,6 b' Config'
39 skiphash = False
39 skiphash = False
40 """
40 """
41
41
42 from __future__ import absolute_import
43
42
44 import inspect
43 import inspect
45 import os
44 import os
@@ -135,7 +134,7 b' def _confighash(ui):'
135 ignored = set()
134 ignored = set()
136 envitems = [
135 envitems = [
137 (k, v)
136 (k, v)
138 for k, v in pycompat.iteritems(encoding.environ)
137 for k, v in encoding.environ.items()
139 if _envre.match(k) and k not in ignored
138 if _envre.match(k) and k not in ignored
140 ]
139 ]
141 envhash = _hashlist(sorted(envitems))
140 envhash = _hashlist(sorted(envitems))
@@ -197,7 +196,7 b' def _mtimehash(paths):'
197 return _hashlist(pycompat.maplist(trystat, paths))[:12]
196 return _hashlist(pycompat.maplist(trystat, paths))[:12]
198
197
199
198
200 class hashstate(object):
199 class hashstate:
201 """a structure storing confighash, mtimehash, paths used for mtimehash"""
200 """a structure storing confighash, mtimehash, paths used for mtimehash"""
202
201
203 def __init__(self, confighash, mtimehash, mtimepaths):
202 def __init__(self, confighash, mtimehash, mtimepaths):
@@ -293,7 +292,7 b' def _loadnewui(srcui, args, cdebug):'
293 return (newui, newlui)
292 return (newui, newlui)
294
293
295
294
296 class channeledsystem(object):
295 class channeledsystem:
297 """Propagate ui.system() request in the following format:
296 """Propagate ui.system() request in the following format:
298
297
299 payload length (unsigned int),
298 payload length (unsigned int),
@@ -321,7 +320,7 b' class channeledsystem(object):'
321
320
322 def __call__(self, cmd, environ, cwd=None, type=b'system', cmdtable=None):
321 def __call__(self, cmd, environ, cwd=None, type=b'system', cmdtable=None):
323 args = [type, cmd, util.abspath(cwd or b'.')]
322 args = [type, cmd, util.abspath(cwd or b'.')]
324 args.extend(b'%s=%s' % (k, v) for k, v in pycompat.iteritems(environ))
323 args.extend(b'%s=%s' % (k, v) for k, v in environ.items())
325 data = b'\0'.join(args)
324 data = b'\0'.join(args)
326 self.out.write(struct.pack(b'>cI', self.channel, len(data)))
325 self.out.write(struct.pack(b'>cI', self.channel, len(data)))
327 self.out.write(data)
326 self.out.write(data)
@@ -390,7 +389,17 b' class chgcmdserver(commandserver.server)'
390 # tell client to sendmsg() with 1-byte payload, which makes it
389 # tell client to sendmsg() with 1-byte payload, which makes it
391 # distinctive from "attachio\n" command consumed by client.read()
390 # distinctive from "attachio\n" command consumed by client.read()
392 self.clientsock.sendall(struct.pack(b'>cI', b'I', 1))
391 self.clientsock.sendall(struct.pack(b'>cI', b'I', 1))
393 clientfds = util.recvfds(self.clientsock.fileno())
392
393 data, ancdata, msg_flags, address = self.clientsock.recvmsg(1, 256)
394 assert len(ancdata) == 1
395 cmsg_level, cmsg_type, cmsg_data = ancdata[0]
396 assert cmsg_level == socket.SOL_SOCKET
397 assert cmsg_type == socket.SCM_RIGHTS
398 # memoryview.cast() was added in typeshed 61600d68772a, but pytype
399 # still complains
400 # pytype: disable=attribute-error
401 clientfds = memoryview(cmsg_data).cast('i').tolist()
402 # pytype: enable=attribute-error
394 self.ui.log(b'chgserver', b'received fds: %r\n', clientfds)
403 self.ui.log(b'chgserver', b'received fds: %r\n', clientfds)
395
404
396 ui = self.ui
405 ui = self.ui
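
The hunk above replaces util.recvfds() with a direct socket.recvmsg() call. A self-contained sketch of the underlying stdlib idiom for receiving file descriptors over a Unix socket looks roughly like this (the helper name and buffer sizes are illustrative, not Mercurial's):

    import array
    import socket

    def recv_fds(sock, maxfds=16):
        # One byte of regular payload plus ancillary room for up to maxfds
        # descriptors sent with SCM_RIGHTS.
        fds = array.array("i")
        msg, ancdata, flags, addr = sock.recvmsg(
            1, socket.CMSG_LEN(maxfds * fds.itemsize)
        )
        for cmsg_level, cmsg_type, cmsg_data in ancdata:
            if (cmsg_level == socket.SOL_SOCKET
                    and cmsg_type == socket.SCM_RIGHTS):
                # Keep only whole ints in case the data was truncated.
                usable = len(cmsg_data) - (len(cmsg_data) % fds.itemsize)
                fds.frombytes(cmsg_data[:usable])
        return list(fds)

The chg code above can instead cast the single ancillary message with memoryview(cmsg_data).cast('i') because it knows exactly how the client packs the descriptors.
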
@@ -409,22 +418,13 b' class chgcmdserver(commandserver.server)'
409 # be unbuffered no matter if it is a tty or not.
418 # be unbuffered no matter if it is a tty or not.
410 if fn == b'ferr':
419 if fn == b'ferr':
411 newfp = fp
420 newfp = fp
412 elif pycompat.ispy3:
421 else:
413 # On Python 3, the standard library doesn't offer line-buffered
422 # On Python 3, the standard library doesn't offer line-buffered
414 # binary streams, so wrap/unwrap it.
423 # binary streams, so wrap/unwrap it.
415 if fp.isatty():
424 if fp.isatty():
416 newfp = procutil.make_line_buffered(fp)
425 newfp = procutil.make_line_buffered(fp)
417 else:
426 else:
418 newfp = procutil.unwrap_line_buffered(fp)
427 newfp = procutil.unwrap_line_buffered(fp)
419 else:
420 # Python 2 uses the I/O streams provided by the C library, so
421 # make it line-buffered explicitly. Otherwise the default would
422 # be decided on first write(), where fout could be a pager.
423 if fp.isatty():
424 bufsize = 1 # line buffered
425 else:
426 bufsize = -1 # system default
427 newfp = os.fdopen(fp.fileno(), mode, bufsize)
428 if newfp is not fp:
428 if newfp is not fp:
429 setattr(ui, fn, newfp)
429 setattr(ui, fn, newfp)
430 setattr(self, cn, newfp)
430 setattr(self, cn, newfp)
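
As the surviving comment notes, Python 3's io layer offers no line-buffered mode for binary streams (buffering=1 only applies to text mode), so the tty case has to be wrapped by hand. A minimal sketch of such a wrapper, assuming that flushing on every write containing a newline is acceptable (this is not procutil's actual implementation):

    class LineBufferedWriter:
        """Flush the underlying binary stream whenever a newline is written."""

        def __init__(self, raw):
            self._raw = raw

        def write(self, data):
            n = self._raw.write(data)
            if b'\n' in data:
                self._raw.flush()
            return n

        def __getattr__(self, name):
            return getattr(self._raw, name)
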
@@ -448,17 +448,8 b' class chgcmdserver(commandserver.server)'
448 nullfd = os.open(os.devnull, os.O_WRONLY)
448 nullfd = os.open(os.devnull, os.O_WRONLY)
449 ui = self.ui
449 ui = self.ui
450 for (ch, fp, fd), (cn, fn, mode) in zip(self._oldios, _iochannels):
450 for (ch, fp, fd), (cn, fn, mode) in zip(self._oldios, _iochannels):
451 newfp = getattr(ui, fn)
452 # On Python 2, newfp and fp may be separate file objects associated
453 # with the same fd, so we must close newfp while it's associated
454 # with the client. Otherwise the new associated fd would be closed
455 # when newfp gets deleted. On Python 3, newfp is just a wrapper
456 # around fp even if newfp is not fp, so deleting newfp is safe.
457 if not (pycompat.ispy3 or newfp is fp):
458 newfp.close()
459 # restore original fd: fp is open again
460 try:
451 try:
461 if (pycompat.ispy3 or newfp is fp) and 'w' in mode:
452 if 'w' in mode:
462 # Discard buffered data which couldn't be flushed because
453 # Discard buffered data which couldn't be flushed because
463 # of EPIPE. The data should belong to the current session
454 # of EPIPE. The data should belong to the current session
464 # and should never persist.
455 # and should never persist.
@@ -636,7 +627,7 b' def _hashaddress(address, hashstr):'
636 return b'%s-%s' % (os.path.join(dirname, basename), hashstr)
627 return b'%s-%s' % (os.path.join(dirname, basename), hashstr)
637
628
638
629
639 class chgunixservicehandler(object):
630 class chgunixservicehandler:
640 """Set of operations for chg services"""
631 """Set of operations for chg services"""
641
632
642 pollinterval = 1 # [sec]
633 pollinterval = 1 # [sec]
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import copy as copymod
9 import copy as copymod
11 import errno
10 import errno
@@ -562,9 +561,8 b' def dorecord('
562 backupdir = repo.vfs.join(b'record-backups')
561 backupdir = repo.vfs.join(b'record-backups')
563 try:
562 try:
564 os.mkdir(backupdir)
563 os.mkdir(backupdir)
565 except OSError as err:
564 except FileExistsError:
566 if err.errno != errno.EEXIST:
565 pass
567 raise
568 try:
566 try:
569 # backup continues
567 # backup continues
570 for f in tobackup:
568 for f in tobackup:
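
Several hunks in this change swap old errno comparisons for the OSError subclasses that Python 3 raises directly (FileExistsError here, FileNotFoundError, BrokenPipeError and ChildProcessError further down). The backup-directory case above reduces to the usual Python 3 idiom; a standalone equivalent, with a hypothetical path argument:

    import os

    def ensure_backup_dir(path):
        try:
            os.mkdir(path)
        except FileExistsError:
            # EEXIST used to require an explicit errno check plus re-raise;
            # the dedicated subclass makes the intent explicit.
            pass
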
@@ -627,7 +625,7 b' def dorecord('
627 # 5. finally restore backed-up files
625 # 5. finally restore backed-up files
628 try:
626 try:
629 dirstate = repo.dirstate
627 dirstate = repo.dirstate
630 for realname, tmpname in pycompat.iteritems(backups):
628 for realname, tmpname in backups.items():
631 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
629 ui.debug(b'restoring %r to %r\n' % (tmpname, realname))
632
630
633 if dirstate.get_entry(realname).maybe_clean:
631 if dirstate.get_entry(realname).maybe_clean:
@@ -667,7 +665,7 b' def dorecord('
667 return commit(ui, repo, recordinwlock, pats, opts)
665 return commit(ui, repo, recordinwlock, pats, opts)
668
666
669
667
670 class dirnode(object):
668 class dirnode:
671 """
669 """
672 Represent a directory in user working copy with information required for
670 Represent a directory in user working copy with information required for
673 the purpose of tersing its status.
671 the purpose of tersing its status.
@@ -833,7 +831,7 b' def _commentlines(raw):'
833
831
834
832
835 @attr.s(frozen=True)
833 @attr.s(frozen=True)
836 class morestatus(object):
834 class morestatus:
837 reporoot = attr.ib()
835 reporoot = attr.ib()
838 unfinishedop = attr.ib()
836 unfinishedop = attr.ib()
839 unfinishedmsg = attr.ib()
837 unfinishedmsg = attr.ib()
@@ -1344,7 +1342,7 b' def isstdiofilename(pat):'
1344 return not pat or pat == b'-'
1342 return not pat or pat == b'-'
1345
1343
1346
1344
1347 class _unclosablefile(object):
1345 class _unclosablefile:
1348 def __init__(self, fp):
1346 def __init__(self, fp):
1349 self._fp = fp
1347 self._fp = fp
1350
1348
@@ -2934,16 +2932,15 b' def amend(ui, repo, old, extra, pats, op'
2934
2932
2935 def filectxfn(repo, ctx_, path):
2933 def filectxfn(repo, ctx_, path):
2936 try:
2934 try:
2937 # Return None for removed files.
2938 if path in wctx.removed() and path in filestoamend:
2939 return None
2940
2941 # If the file being considered is not amongst the files
2935 # If the file being considered is not amongst the files
2942 # to be amended, we should use the file context from the
2936 # to be amended, we should use the file context from the
2943 # old changeset. This avoids issues when only some files in
2937 # old changeset. This avoids issues when only some files in
2944 # the working copy are being amended but there are also
2938 # the working copy are being amended but there are also
2945 # changes to other files from the old changeset.
2939 # changes to other files from the old changeset.
2946 if path in filestoamend:
2940 if path in filestoamend:
2941 # Return None for removed files.
2942 if path in wctx.removed():
2943 return None
2947 fctx = wctx[path]
2944 fctx = wctx[path]
2948 else:
2945 else:
2949 fctx = old.filectx(path)
2946 fctx = old.filectx(path)
@@ -3750,10 +3747,18 b' def _performrevert('
3750
3747
3751 for f in actions[b'add'][0]:
3748 for f in actions[b'add'][0]:
3752 # Don't checkout modified files, they are already created by the diff
3749 # Don't checkout modified files, they are already created by the diff
3753 if f not in newlyaddedandmodifiedfiles:
3750 if f in newlyaddedandmodifiedfiles:
3754 prntstatusmsg(b'add', f)
3751 continue
3755 checkout(f)
3752
3756 repo.dirstate.set_tracked(f)
3753 if interactive:
3754 choice = repo.ui.promptchoice(
3755 _(b"add new file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
3756 )
3757 if choice != 0:
3758 continue
3759 prntstatusmsg(b'add', f)
3760 checkout(f)
3761 repo.dirstate.set_tracked(f)
3757
3762
3758 for f in actions[b'undelete'][0]:
3763 for f in actions[b'undelete'][0]:
3759 if interactive:
3764 if interactive:
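
The interactive branch added above relies on ui.promptchoice(), whose prompt string packs the question and the choices into one bytes literal: the text before the first "$$" is the question, each later "$$"-separated item is a choice, "&" marks that choice's key, and the return value is the zero-based index of the answer (so 0 means "Yes" and anything else skips the file). A hypothetical stand-in that mimics just that parsing, for illustration only:

    def promptchoice(prompt, answer):
        parts = prompt.split(b'$$')
        question, choices = parts[0], [c.strip() for c in parts[1:]]
        keys = [c[c.index(b'&') + 1:][:1].lower() for c in choices]
        return keys.index(answer.lower())

    assert promptchoice(b"add new file f (Yn)?$$ &Yes $$ &No", b"n") == 1
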
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import re
9 import re
11
10
@@ -5,9 +5,7 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8
9
10 import errno
11 import os
9 import os
12 import re
10 import re
13 import sys
11 import sys
@@ -1572,7 +1570,7 b' def bundle(ui, repo, fname, *dests, **op'
1572 pycompat.bytestr(e),
1570 pycompat.bytestr(e),
1573 hint=_(b"see 'hg help bundlespec' for supported values for --type"),
1571 hint=_(b"see 'hg help bundlespec' for supported values for --type"),
1574 )
1572 )
1575 cgversion = bundlespec.contentopts[b"cg.version"]
1573 cgversion = bundlespec.params[b"cg.version"]
1576
1574
1577 # Packed bundles are a pseudo bundle format for now.
1575 # Packed bundles are a pseudo bundle format for now.
1578 if cgversion == b's1':
1576 if cgversion == b's1':
@@ -1601,8 +1599,9 b' def bundle(ui, repo, fname, *dests, **op'
1601 raise error.InputError(
1599 raise error.InputError(
1602 _(b"--base is incompatible with specifying destinations")
1600 _(b"--base is incompatible with specifying destinations")
1603 )
1601 )
1604 common = [repo[rev].node() for rev in base]
1602 cl = repo.changelog
1605 heads = [repo[r].node() for r in revs] if revs else None
1603 common = [cl.node(rev) for rev in base]
1604 heads = [cl.node(r) for r in revs] if revs else None
1606 outgoing = discovery.outgoing(repo, common, heads)
1605 outgoing = discovery.outgoing(repo, common, heads)
1607 missing = outgoing.missing
1606 missing = outgoing.missing
1608 excluded = outgoing.excluded
1607 excluded = outgoing.excluded
@@ -1681,14 +1680,14 b' def bundle(ui, repo, fname, *dests, **op'
1681 # Bundling of obsmarker and phases is optional as not all clients
1680 # Bundling of obsmarker and phases is optional as not all clients
1682 # support the necessary features.
1681 # support the necessary features.
1683 cfg = ui.configbool
1682 cfg = ui.configbool
1684 contentopts = {
1683 obsolescence_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker')
1685 b'obsolescence': cfg(b'experimental', b'evolution.bundle-obsmarker'),
1684 bundlespec.set_param(b'obsolescence', obsolescence_cfg, overwrite=False)
1686 b'obsolescence-mandatory': cfg(
1685 obs_mand_cfg = cfg(b'experimental', b'evolution.bundle-obsmarker:mandatory')
1687 b'experimental', b'evolution.bundle-obsmarker:mandatory'
1686 bundlespec.set_param(
1688 ),
1687 b'obsolescence-mandatory', obs_mand_cfg, overwrite=False
1689 b'phases': cfg(b'experimental', b'bundle-phases'),
1688 )
1690 }
1689 phases_cfg = cfg(b'experimental', b'bundle-phases')
1691 bundlespec.contentopts.update(contentopts)
1690 bundlespec.set_param(b'phases', phases_cfg, overwrite=False)
1692
1691
1693 bundle2.writenewbundle(
1692 bundle2.writenewbundle(
1694 ui,
1693 ui,
@@ -1697,7 +1696,7 b' def bundle(ui, repo, fname, *dests, **op'
1697 fname,
1696 fname,
1698 bversion,
1697 bversion,
1699 outgoing,
1698 outgoing,
1700 bundlespec.contentopts,
1699 bundlespec.params,
1701 compression=bcompression,
1700 compression=bcompression,
1702 compopts=compopts,
1701 compopts=compopts,
1703 )
1702 )
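
The bundle() hunks above move from filling bundlespec.contentopts directly to calling bundlespec.set_param(name, value, overwrite=False), i.e. config-derived values only fill in parameters the user's bundlespec left unset. A rough dictionary-level sketch of that precedence rule (helper name hypothetical):

    def set_param_default(params, name, value):
        # overwrite=False semantics: a value already present (typically one
        # the user spelled out in the bundlespec) always wins.
        if name not in params:
            params[name] = value

    params = {b'obsolescence': True}            # user asked for it explicitly
    set_param_default(params, b'obsolescence', False)
    set_param_default(params, b'phases', True)
    assert params == {b'obsolescence': True, b'phases': True}
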
@@ -2477,7 +2476,7 b' def copy(ui, repo, *pats, **opts):'
2477 )
2476 )
2478 def debugcommands(ui, cmd=b'', *args):
2477 def debugcommands(ui, cmd=b'', *args):
2479 """list all available commands and options"""
2478 """list all available commands and options"""
2480 for cmd, vals in sorted(pycompat.iteritems(table)):
2479 for cmd, vals in sorted(table.items()):
2481 cmd = cmd.split(b'|')[0]
2480 cmd = cmd.split(b'|')[0]
2482 opts = b', '.join([i[1] for i in vals[1]])
2481 opts = b', '.join([i[1] for i in vals[1]])
2483 ui.write(b'%s: %s\n' % (cmd, opts))
2482 ui.write(b'%s: %s\n' % (cmd, opts))
@@ -2544,7 +2543,8 b' def diff(ui, repo, *pats, **opts):'
2544
2543
2545 :hg:`diff` may generate unexpected results for merges, as it will
2544 :hg:`diff` may generate unexpected results for merges, as it will
2546 default to comparing against the working directory's first
2545 default to comparing against the working directory's first
2547 parent changeset if no revisions are specified.
2546 parent changeset if no revisions are specified. To diff against the
2547 conflict regions, you can use `--config diff.merge=yes`.
2548
2548
2549 By default, the working directory files are compared to its first parent. To
2549 By default, the working directory files are compared to its first parent. To
2550 see the differences from another revision, use --from. To see the difference
2550 see the differences from another revision, use --from. To see the difference
@@ -3918,9 +3918,7 b' def identify('
3918 hexremoterev = hex(remoterev)
3918 hexremoterev = hex(remoterev)
3919 bms = [
3919 bms = [
3920 bm
3920 bm
3921 for bm, bmr in pycompat.iteritems(
3921 for bm, bmr in peer.listkeys(b'bookmarks').items()
3922 peer.listkeys(b'bookmarks')
3923 )
3924 if bmr == hexremoterev
3922 if bmr == hexremoterev
3925 ]
3923 ]
3926
3924
@@ -6183,9 +6181,8 b' def resolve(ui, repo, *pats, **opts):'
6183 a = repo.wjoin(f)
6181 a = repo.wjoin(f)
6184 try:
6182 try:
6185 util.copyfile(a, a + b".resolve")
6183 util.copyfile(a, a + b".resolve")
6186 except (IOError, OSError) as inst:
6184 except FileNotFoundError:
6187 if inst.errno != errno.ENOENT:
6185 pass
6188 raise
6189
6186
6190 try:
6187 try:
6191 # preresolve file
6188 # preresolve file
@@ -6202,9 +6199,8 b' def resolve(ui, repo, *pats, **opts):'
6202 util.rename(
6199 util.rename(
6203 a + b".resolve", scmutil.backuppath(ui, repo, f)
6200 a + b".resolve", scmutil.backuppath(ui, repo, f)
6204 )
6201 )
6205 except OSError as inst:
6202 except FileNotFoundError:
6206 if inst.errno != errno.ENOENT:
6203 pass
6207 raise
6208
6204
6209 if hasconflictmarkers:
6205 if hasconflictmarkers:
6210 ui.warn(
6206 ui.warn(
@@ -7097,7 +7093,7 b' def summary(ui, repo, **opts):'
7097
7093
7098 c = repo.dirstate.copies()
7094 c = repo.dirstate.copies()
7099 copied, renamed = [], []
7095 copied, renamed = [], []
7100 for d, s in pycompat.iteritems(c):
7096 for d, s in c.items():
7101 if s in status.removed:
7097 if s in status.removed:
7102 status.removed.remove(s)
7098 status.removed.remove(s)
7103 renamed.append(d)
7099 renamed.append(d)
@@ -5,24 +5,16 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import errno
11 import gc
9 import gc
12 import os
10 import os
13 import random
11 import random
12 import selectors
14 import signal
13 import signal
15 import socket
14 import socket
16 import struct
15 import struct
17 import traceback
16 import traceback
18
17
19 try:
20 import selectors
21
22 selectors.BaseSelector
23 except ImportError:
24 from .thirdparty import selectors2 as selectors
25
26 from .i18n import _
18 from .i18n import _
27 from .pycompat import getattr
19 from .pycompat import getattr
28 from . import (
20 from . import (
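
The import hunk above drops the try/except fallback onto the bundled thirdparty selectors2 module: on Python 3 the stdlib selectors module (available since 3.4) is always there, and since PEP 475 its select() retries on EINTR instead of surfacing it, which is also why the errno.ETIMEDOUT workaround disappears further down. A minimal sketch of the pattern the server relies on:

    import selectors
    import socket

    sel = selectors.DefaultSelector()
    lsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    lsock.bind(("127.0.0.1", 0))
    lsock.listen()
    sel.register(lsock, selectors.EVENT_READ)
    # Returns [] on timeout (no exception), and retries internally if a
    # signal handler interrupts the underlying system call.
    events = sel.select(timeout=0.1)
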
@@ -40,7 +32,7 b' from .utils import ('
40 )
32 )
41
33
42
34
43 class channeledoutput(object):
35 class channeledoutput:
44 """
36 """
45 Write data to out in the following format:
37 Write data to out in the following format:
46
38
@@ -69,7 +61,7 b' class channeledoutput(object):'
69 return getattr(self.out, attr)
61 return getattr(self.out, attr)
70
62
71
63
72 class channeledmessage(object):
64 class channeledmessage:
73 """
65 """
74 Write encoded message and metadata to out in the following format:
66 Write encoded message and metadata to out in the following format:
75
67
@@ -98,7 +90,7 b' class channeledmessage(object):'
98 return getattr(self._cout, attr)
90 return getattr(self._cout, attr)
99
91
100
92
101 class channeledinput(object):
93 class channeledinput:
102 """
94 """
103 Read data from in_.
95 Read data from in_.
104
96
@@ -201,7 +193,7 b' def _selectmessageencoder(ui):'
201 )
193 )
202
194
203
195
204 class server(object):
196 class server:
205 """
197 """
206 Listens for commands on fin, runs them and writes the output on a channel
198 Listens for commands on fin, runs them and writes the output on a channel
207 based stream to fout.
199 based stream to fout.
@@ -451,7 +443,7 b' def setuplogging(ui, repo=None, fp=None)'
451 u.setlogger(b'cmdserver', logger)
443 u.setlogger(b'cmdserver', logger)
452
444
453
445
454 class pipeservice(object):
446 class pipeservice:
455 def __init__(self, ui, repo, opts):
447 def __init__(self, ui, repo, opts):
456 self.ui = ui
448 self.ui = ui
457 self.repo = repo
449 self.repo = repo
@@ -501,9 +493,8 b' def _serverequest(ui, repo, conn, create'
501 # known exceptions are caught by dispatch.
493 # known exceptions are caught by dispatch.
502 except error.Abort as inst:
494 except error.Abort as inst:
503 ui.error(_(b'abort: %s\n') % inst.message)
495 ui.error(_(b'abort: %s\n') % inst.message)
504 except IOError as inst:
496 except BrokenPipeError:
505 if inst.errno != errno.EPIPE:
497 pass
506 raise
507 except KeyboardInterrupt:
498 except KeyboardInterrupt:
508 pass
499 pass
509 finally:
500 finally:
@@ -521,12 +512,11 b' def _serverequest(ui, repo, conn, create'
521 fin.close()
512 fin.close()
522 try:
513 try:
523 fout.close() # implicit flush() may cause another EPIPE
514 fout.close() # implicit flush() may cause another EPIPE
524 except IOError as inst:
515 except BrokenPipeError:
525 if inst.errno != errno.EPIPE:
516 pass
526 raise
527
517
528
518
529 class unixservicehandler(object):
519 class unixservicehandler:
530 """Set of pluggable operations for unix-mode services
520 """Set of pluggable operations for unix-mode services
531
521
532 Almost all methods except for createcmdserver() are called in the main
522 Almost all methods except for createcmdserver() are called in the main
@@ -560,7 +550,7 b' class unixservicehandler(object):'
560 return server(self.ui, repo, fin, fout, prereposetups)
550 return server(self.ui, repo, fin, fout, prereposetups)
561
551
562
552
563 class unixforkingservice(object):
553 class unixforkingservice:
564 """
554 """
565 Listens on unix domain socket and forks server per connection
555 Listens on unix domain socket and forks server per connection
566 """
556 """
@@ -645,15 +635,7 b' class unixforkingservice(object):'
645 # waiting for recv() will receive ECONNRESET.
635 # waiting for recv() will receive ECONNRESET.
646 self._unlinksocket()
636 self._unlinksocket()
647 exiting = True
637 exiting = True
648 try:
638 events = selector.select(timeout=h.pollinterval)
649 events = selector.select(timeout=h.pollinterval)
650 except OSError as inst:
651 # selectors2 raises ETIMEDOUT if timeout exceeded while
652 # handling signal interrupt. That's probably wrong, but
653 # we can easily get around it.
654 if inst.errno != errno.ETIMEDOUT:
655 raise
656 events = []
657 if not events:
639 if not events:
658 # only exit if we completed all queued requests
640 # only exit if we completed all queued requests
659 if exiting:
641 if exiting:
@@ -665,12 +647,7 b' class unixforkingservice(object):'
665
647
666 def _acceptnewconnection(self, sock, selector):
648 def _acceptnewconnection(self, sock, selector):
667 h = self._servicehandler
649 h = self._servicehandler
668 try:
650 conn, _addr = sock.accept()
669 conn, _addr = sock.accept()
670 except socket.error as inst:
671 if inst.args[0] == errno.EINTR:
672 return
673 raise
674
651
675 # Future improvement: On Python 3.7, maybe gc.freeze() can be used
652 # Future improvement: On Python 3.7, maybe gc.freeze() can be used
676 # to prevent COW memory from being touched by GC.
653 # to prevent COW memory from being touched by GC.
@@ -703,12 +680,7 b' class unixforkingservice(object):'
703
680
704 def _handlemainipc(self, sock, selector):
681 def _handlemainipc(self, sock, selector):
705 """Process messages sent from a worker"""
682 """Process messages sent from a worker"""
706 try:
683 path = sock.recv(32768) # large enough to receive path
707 path = sock.recv(32768) # large enough to receive path
708 except socket.error as inst:
709 if inst.args[0] == errno.EINTR:
710 return
711 raise
712 self._repoloader.load(path)
684 self._repoloader.load(path)
713
685
714 def _sigchldhandler(self, signal, frame):
686 def _sigchldhandler(self, signal, frame):
@@ -718,11 +690,7 b' class unixforkingservice(object):'
718 while self._workerpids:
690 while self._workerpids:
719 try:
691 try:
720 pid, _status = os.waitpid(-1, options)
692 pid, _status = os.waitpid(-1, options)
721 except OSError as inst:
693 except ChildProcessError:
722 if inst.errno == errno.EINTR:
723 continue
724 if inst.errno != errno.ECHILD:
725 raise
726 # no child processes at all (reaped by other waitpid()?)
694 # no child processes at all (reaped by other waitpid()?)
727 self._workerpids.clear()
695 self._workerpids.clear()
728 return
696 return
@@ -3,9 +3,6 b''
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
7
8 import errno
9
6
10 from .i18n import _
7 from .i18n import _
11 from .node import (
8 from .node import (
@@ -251,11 +248,6 b' def _process_files(tr, ctx, ms, files, n'
251 except OSError:
248 except OSError:
252 repo.ui.warn(_(b"trouble committing %s!\n") % uipathfn(f))
249 repo.ui.warn(_(b"trouble committing %s!\n") % uipathfn(f))
253 raise
250 raise
254 except IOError as inst:
255 errcode = getattr(inst, 'errno', errno.ENOENT)
256 if error or errcode and errcode != errno.ENOENT:
257 repo.ui.warn(_(b"trouble committing %s!\n") % uipathfn(f))
258 raise
259
251
260 # update manifest
252 # update manifest
261 removed = [f for f in removed if f in m1 or f in m2]
253 removed = [f for f in removed if f in m1 or f in m2]
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import errno
9 import errno
11 import os
10 import os
@@ -15,12 +14,11 b' from .pycompat import getattr'
15 from . import (
14 from . import (
16 encoding,
15 encoding,
17 error,
16 error,
18 pycompat,
19 util,
17 util,
20 )
18 )
21
19
22
20
23 class config(object):
21 class config:
24 def __init__(self, data=None):
22 def __init__(self, data=None):
25 self._current_source_level = 0
23 self._current_source_level = 0
26 self._data = {}
24 self._data = {}
@@ -111,20 +109,19 b' class config(object):'
111 return sorted(self._data.keys())
109 return sorted(self._data.keys())
112
110
113 def items(self, section):
111 def items(self, section):
114 items = pycompat.iteritems(self._data.get(section, {}))
112 items = self._data.get(section, {}).items()
115 return [(k, v[0]) for (k, v) in items]
113 return [(k, v[0]) for (k, v) in items]
116
114
117 def set(self, section, item, value, source=b""):
115 def set(self, section, item, value, source=b""):
118 if pycompat.ispy3:
116 assert not isinstance(
119 assert not isinstance(
117 section, str
120 section, str
118 ), b'config section may not be unicode strings on Python 3'
121 ), b'config section may not be unicode strings on Python 3'
119 assert not isinstance(
122 assert not isinstance(
120 item, str
123 item, str
121 ), b'config item may not be unicode strings on Python 3'
124 ), b'config item may not be unicode strings on Python 3'
122 assert not isinstance(
125 assert not isinstance(
123 value, str
126 value, str
124 ), b'config values may not be unicode strings on Python 3'
127 ), b'config values may not be unicode strings on Python 3'
128 if section not in self:
125 if section not in self:
129 self._data[section] = util.cowsortdict()
126 self._data[section] = util.cowsortdict()
130 else:
127 else:
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import functools
9 import functools
11 import re
10 import re
@@ -30,7 +29,7 b' def loadconfigtable(ui, extname, configt'
30 knownitems.update(items)
29 knownitems.update(items)
31
30
32
31
33 class configitem(object):
32 class configitem:
34 """represent a known config item
33 """represent a known config item
35
34
36 :section: the official config section where to find this item,
35 :section: the official config section where to find this item,
@@ -585,6 +584,11 b' coreconfigitem('
585 default=b'',
584 default=b'',
586 )
585 )
587 coreconfigitem(
586 coreconfigitem(
587 b'debug',
588 b'revlog.debug-delta',
589 default=False,
590 )
591 coreconfigitem(
588 b'defaults',
592 b'defaults',
589 b'.*',
593 b'.*',
590 default=None,
594 default=None,
@@ -1279,6 +1283,18 b' coreconfigitem('
1279 )
1283 )
1280 coreconfigitem(
1284 coreconfigitem(
1281 b'format',
1285 b'format',
1286 b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories',
1287 default=False,
1288 experimental=True,
1289 )
1290 coreconfigitem(
1291 b'format',
1292 b'use-dirstate-v2.automatic-upgrade-of-mismatching-repositories:quiet',
1293 default=False,
1294 experimental=True,
1295 )
1296 coreconfigitem(
1297 b'format',
1282 b'use-dirstate-tracked-hint',
1298 b'use-dirstate-tracked-hint',
1283 default=False,
1299 default=False,
1284 experimental=True,
1300 experimental=True,
@@ -1291,6 +1307,18 b' coreconfigitem('
1291 )
1307 )
1292 coreconfigitem(
1308 coreconfigitem(
1293 b'format',
1309 b'format',
1310 b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories',
1311 default=False,
1312 experimental=True,
1313 )
1314 coreconfigitem(
1315 b'format',
1316 b'use-dirstate-tracked-hint.automatic-upgrade-of-mismatching-repositories:quiet',
1317 default=False,
1318 experimental=True,
1319 )
1320 coreconfigitem(
1321 b'format',
1294 b'dotencode',
1322 b'dotencode',
1295 default=True,
1323 default=True,
1296 )
1324 )
@@ -1387,6 +1415,18 b' coreconfigitem('
1387 )
1415 )
1388 coreconfigitem(
1416 coreconfigitem(
1389 b'format',
1417 b'format',
1418 b'use-share-safe.automatic-upgrade-of-mismatching-repositories',
1419 default=False,
1420 experimental=True,
1421 )
1422 coreconfigitem(
1423 b'format',
1424 b'use-share-safe.automatic-upgrade-of-mismatching-repositories:quiet',
1425 default=False,
1426 experimental=True,
1427 )
1428 coreconfigitem(
1429 b'format',
1390 b'internal-phase',
1430 b'internal-phase',
1391 default=False,
1431 default=False,
1392 experimental=True,
1432 experimental=True,
@@ -1571,6 +1611,59 b' coreconfigitem('
1571 default=False,
1611 default=False,
1572 )
1612 )
1573 coreconfigitem(
1613 coreconfigitem(
1614 b'merge',
1615 b'disable-partial-tools',
1616 default=False,
1617 experimental=True,
1618 )
1619 coreconfigitem(
1620 b'partial-merge-tools',
1621 b'.*',
1622 default=None,
1623 generic=True,
1624 experimental=True,
1625 )
1626 coreconfigitem(
1627 b'partial-merge-tools',
1628 br'.*\.patterns',
1629 default=dynamicdefault,
1630 generic=True,
1631 priority=-1,
1632 experimental=True,
1633 )
1634 coreconfigitem(
1635 b'partial-merge-tools',
1636 br'.*\.executable$',
1637 default=dynamicdefault,
1638 generic=True,
1639 priority=-1,
1640 experimental=True,
1641 )
1642 coreconfigitem(
1643 b'partial-merge-tools',
1644 br'.*\.order',
1645 default=0,
1646 generic=True,
1647 priority=-1,
1648 experimental=True,
1649 )
1650 coreconfigitem(
1651 b'partial-merge-tools',
1652 br'.*\.args',
1653 default=b"$local $base $other",
1654 generic=True,
1655 priority=-1,
1656 experimental=True,
1657 )
1658 coreconfigitem(
1659 b'partial-merge-tools',
1660 br'.*\.disable',
1661 default=False,
1662 generic=True,
1663 priority=-1,
1664 experimental=True,
1665 )
1666 coreconfigitem(
1574 b'merge-tools',
1667 b'merge-tools',
1575 b'.*',
1668 b'.*',
1576 default=None,
1669 default=None,
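The partial-merge-tools entries above are registered with generic=True and regex names, so one declaration covers every concrete tool a user later configures. As a rough, standalone illustration of how such regex-keyed defaults can be resolved (a simplification, not Mercurial's actual configitems lookup; the tool name `sort-lists` and the dict layout are made up):

    import re

    # registered defaults, keyed by a regex over the item name (illustrative only)
    generic_items = {
        br'.*\.args': b'$local $base $other',
        br'.*\.order': 0,
        br'.*\.disable': False,
    }

    def default_for(item_name):
        """Return the registered default for a concrete item name, if any."""
        for pattern, default in generic_items.items():
            if re.fullmatch(pattern, item_name):
                return default
        return None

    assert default_for(b'sort-lists.args') == b'$local $base $other'
    assert default_for(b'sort-lists.order') == 0
    assert default_for(b'sort-lists.unknown') is None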
@@ -1703,6 +1796,30 b' coreconfigitem('
1703 generic=True,
1796 generic=True,
1704 )
1797 )
1705 coreconfigitem(
1798 coreconfigitem(
1799 b'paths',
1800 b'.*:bookmarks.mode',
1801 default='default',
1802 generic=True,
1803 )
1804 coreconfigitem(
1805 b'paths',
1806 b'.*:multi-urls',
1807 default=False,
1808 generic=True,
1809 )
1810 coreconfigitem(
1811 b'paths',
1812 b'.*:pushrev',
1813 default=None,
1814 generic=True,
1815 )
1816 coreconfigitem(
1817 b'paths',
1818 b'.*:pushurl',
1819 default=None,
1820 generic=True,
1821 )
1822 coreconfigitem(
1706 b'phases',
1823 b'phases',
1707 b'checksubrepos',
1824 b'checksubrepos',
1708 default=b'follow',
1825 default=b'follow',
@@ -2053,6 +2170,16 b' coreconfigitem('
2053 default=True,
2170 default=True,
2054 )
2171 )
2055 coreconfigitem(
2172 coreconfigitem(
2173 b'share',
2174 b'safe-mismatch.source-not-safe:verbose-upgrade',
2175 default=True,
2176 )
2177 coreconfigitem(
2178 b'share',
2179 b'safe-mismatch.source-safe:verbose-upgrade',
2180 default=True,
2181 )
2182 coreconfigitem(
2056 b'shelve',
2183 b'shelve',
2057 b'maxbackups',
2184 b'maxbackups',
2058 default=10,
2185 default=10,
@@ -5,9 +5,7 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8
9
10 import errno
11 import filecmp
9 import filecmp
12 import os
10 import os
13 import stat
11 import stat
@@ -33,7 +31,6 b' from . import ('
33 patch,
31 patch,
34 pathutil,
32 pathutil,
35 phases,
33 phases,
36 pycompat,
37 repoview,
34 repoview,
38 scmutil,
35 scmutil,
39 sparse,
36 sparse,
@@ -52,7 +49,7 b' from .dirstateutils import ('
52 propertycache = util.propertycache
49 propertycache = util.propertycache
53
50
54
51
55 class basectx(object):
52 class basectx:
56 """A basectx object represents the common logic for its children:
53 """A basectx object represents the common logic for its children:
57 changectx: read-only context that is already present in the repo,
54 changectx: read-only context that is already present in the repo,
58 workingctx: a context that represents the working directory and can
55 workingctx: a context that represents the working directory and can
@@ -124,7 +121,7 b' class basectx(object):'
124 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
121 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
125 deletedset = set(deleted)
122 deletedset = set(deleted)
126 d = mf1.diff(mf2, match=match, clean=listclean)
123 d = mf1.diff(mf2, match=match, clean=listclean)
127 for fn, value in pycompat.iteritems(d):
124 for fn, value in d.items():
128 if fn in deletedset:
125 if fn in deletedset:
129 continue
126 continue
130 if value is None:
127 if value is None:
@@ -797,7 +794,7 b' class changectx(basectx):'
797 return self.walk(match)
794 return self.walk(match)
798
795
799
796
800 class basefilectx(object):
797 class basefilectx:
801 """A filecontext object represents the common logic for its children:
798 """A filecontext object represents the common logic for its children:
802 filectx: read-only access to a filerevision that is already present
799 filectx: read-only access to a filerevision that is already present
803 in the repo,
800 in the repo,
@@ -993,6 +990,16 b' class basefilectx(object):'
993 if self._repo._encodefilterpats:
990 if self._repo._encodefilterpats:
994 # can't rely on size() because wdir content may be decoded
991 # can't rely on size() because wdir content may be decoded
995 return self._filelog.cmp(self._filenode, fctx.data())
992 return self._filelog.cmp(self._filenode, fctx.data())
993 # filelog.size() has two special cases:
994 # - censored metadata
995 # - copy/rename tracking
996 # The first is detected by peeking into the delta,
997 # the second is detected by abusing parent order
998 # in the revlog index as a flag bit. This leaves files using
999 # the dummy encoding and non-standard meta attributes.
1000 # The following check is a special case for the empty
1001 # metadata block used if the raw file content starts with '\1\n'.
1002 # Cases of arbitrary metadata flags are currently mishandled.
996 if self.size() - 4 == fctx.size():
1003 if self.size() - 4 == fctx.size():
997 # size() can match:
1004 # size() can match:
998 # if file data starts with '\1\n', empty metadata block is
1005 # if file data starts with '\1\n', empty metadata block is
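The `size() - 4` comparison above hinges on the fact that an empty filelog metadata block is exactly four bytes. A minimal standalone sketch of that escaping rule (not Mercurial's actual metadata packing code; the helper name is invented):

    def wrap_with_empty_meta(data: bytes) -> bytes:
        """Prefix data with an empty metadata block when it would otherwise
        be mistaken for metadata (i.e. it starts with the '\\x01\\n' marker)."""
        if data.startswith(b'\x01\n'):
            return b'\x01\n\x01\n' + data  # empty metadata block: 4 bytes
        return data

    raw_text = b'\x01\ncontent that happens to start with the marker'
    stored = wrap_with_empty_meta(raw_text)
    # the stored form is exactly 4 bytes longer than the logical file content
    assert len(stored) - 4 == len(raw_text)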
@@ -1729,9 +1736,7 b' class workingctx(committablectx):'
1729 def copy(self, source, dest):
1736 def copy(self, source, dest):
1730 try:
1737 try:
1731 st = self._repo.wvfs.lstat(dest)
1738 st = self._repo.wvfs.lstat(dest)
1732 except OSError as err:
1739 except FileNotFoundError:
1733 if err.errno != errno.ENOENT:
1734 raise
1735 self._repo.ui.warn(
1740 self._repo.ui.warn(
1736 _(b"%s does not exist!\n") % self._repo.dirstate.pathto(dest)
1741 _(b"%s does not exist!\n") % self._repo.dirstate.pathto(dest)
1737 )
1742 )
@@ -2161,9 +2166,7 b' class workingfilectx(committablefilectx)'
2161 t, tz = self._changectx.date()
2166 t, tz = self._changectx.date()
2162 try:
2167 try:
2163 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
2168 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
2164 except OSError as err:
2169 except FileNotFoundError:
2165 if err.errno != errno.ENOENT:
2166 raise
2167 return (t, tz)
2170 return (t, tz)
2168
2171
2169 def exists(self):
2172 def exists(self):
@@ -2422,7 +2425,7 b' class overlayworkingctx(committablectx):'
2422 # Test that each new directory to be created to write this path from p2
2425 # Test that each new directory to be created to write this path from p2
2423 # is not a file in p1.
2426 # is not a file in p1.
2424 components = path.split(b'/')
2427 components = path.split(b'/')
2425 for i in pycompat.xrange(len(components)):
2428 for i in range(len(components)):
2426 component = b"/".join(components[0:i])
2429 component = b"/".join(components[0:i])
2427 if component in self:
2430 if component in self:
2428 fail(path, component)
2431 fail(path, component)
@@ -3105,7 +3108,7 b' class metadataonlyctx(committablectx):'
3105 return scmutil.status(modified, added, removed, [], [], [], [])
3108 return scmutil.status(modified, added, removed, [], [], [], [])
3106
3109
3107
3110
3108 class arbitraryfilectx(object):
3111 class arbitraryfilectx:
3109 """Allows you to use filectx-like functions on a file in an arbitrary
3112 """Allows you to use filectx-like functions on a file in an arbitrary
3110 location on disk, possibly not in the working directory.
3113 location on disk, possibly not in the working directory.
3111 """
3114 """
@@ -6,7 +6,6 b''
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
10
9
11 import collections
10 import collections
12 import os
11 import os
@@ -18,7 +17,6 b' from . import ('
18 match as matchmod,
17 match as matchmod,
19 pathutil,
18 pathutil,
20 policy,
19 policy,
21 pycompat,
22 util,
20 util,
23 )
21 )
24
22
@@ -69,7 +67,7 b' def _filter(src, dst, t):'
69 def _chain(prefix, suffix):
67 def _chain(prefix, suffix):
70 """chain two sets of copies 'prefix' and 'suffix'"""
68 """chain two sets of copies 'prefix' and 'suffix'"""
71 result = prefix.copy()
69 result = prefix.copy()
72 for key, value in pycompat.iteritems(suffix):
70 for key, value in suffix.items():
73 result[key] = prefix.get(value, value)
71 result[key] = prefix.get(value, value)
74 return result
72 return result
75
73
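`_chain` above composes two dest -> source copy mappings so that transitive copies point back to the original source. A toy run (ignoring the `_filter` pass that the real code applies afterwards):

    def chain(prefix, suffix):
        """Compose two copy mappings, mirroring _chain() above."""
        result = dict(prefix)
        for key, value in suffix.items():
            # follow the copy source through the first mapping when possible
            result[key] = prefix.get(value, value)
        return result

    # 'b' was copied from 'a', then 'c' was copied from 'b':
    # the chained mapping records 'c' as ultimately coming from 'a'.
    assert chain({'b': 'a'}, {'c': 'b'}) == {'b': 'a', 'c': 'a'}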
@@ -409,7 +407,7 b' def _combine_changeset_copies('
409
407
410 if childcopies:
408 if childcopies:
411 newcopies = copies.copy()
409 newcopies = copies.copy()
412 for dest, source in pycompat.iteritems(childcopies):
410 for dest, source in childcopies.items():
413 prev = copies.get(source)
411 prev = copies.get(source)
414 if prev is not None and prev[1] is not None:
412 if prev is not None and prev[1] is not None:
415 source = prev[1]
413 source = prev[1]
@@ -624,7 +622,7 b' def _combine_changeset_copies_extra('
624 newcopies = copies
622 newcopies = copies
625 if childcopies:
623 if childcopies:
626 newcopies = copies.copy()
624 newcopies = copies.copy()
627 for dest, source in pycompat.iteritems(childcopies):
625 for dest, source in childcopies.items():
628 prev = copies.get(source)
626 prev = copies.get(source)
629 if prev is not None and prev[1] is not None:
627 if prev is not None and prev[1] is not None:
630 source = prev[1]
628 source = prev[1]
@@ -722,7 +720,7 b' def _reverse_renames(copies, dst, match)'
722 # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
720 # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
723 # arbitrarily pick one of the renames.
721 # arbitrarily pick one of the renames.
724 r = {}
722 r = {}
725 for k, v in sorted(pycompat.iteritems(copies)):
723 for k, v in sorted(copies.items()):
726 if match and not match(v):
724 if match and not match(v):
727 continue
725 continue
728 # remove copies
726 # remove copies
@@ -889,7 +887,7 b' def _checksinglesidecopies('
889 copy[dst] = src
887 copy[dst] = src
890
888
891
889
892 class branch_copies(object):
890 class branch_copies:
893 """Information about copies made on one side of a merge/graft.
891 """Information about copies made on one side of a merge/graft.
894
892
895 "copy" is a mapping from destination name -> source name,
893 "copy" is a mapping from destination name -> source name,
@@ -1081,7 +1079,7 b' def _dir_renames(repo, ctx, copy, fullco'
1081
1079
1082 # examine each file copy for a potential directory move, which is
1080 # examine each file copy for a potential directory move, which is
1083 # when all the files in a directory are moved to a new directory
1081 # when all the files in a directory are moved to a new directory
1084 for dst, src in pycompat.iteritems(fullcopy):
1082 for dst, src in fullcopy.items():
1085 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
1083 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
1086 if dsrc in invalid:
1084 if dsrc in invalid:
1087 # already seen to be uninteresting
1085 # already seen to be uninteresting
@@ -1104,7 +1102,7 b' def _dir_renames(repo, ctx, copy, fullco'
1104 if not dirmove:
1102 if not dirmove:
1105 return {}, {}
1103 return {}, {}
1106
1104
1107 dirmove = {k + b"/": v + b"/" for k, v in pycompat.iteritems(dirmove)}
1105 dirmove = {k + b"/": v + b"/" for k, v in dirmove.items()}
1108
1106
1109 for d in dirmove:
1107 for d in dirmove:
1110 repo.ui.debug(
1108 repo.ui.debug(
@@ -1187,7 +1185,7 b' def _heuristicscopytracing(repo, c1, c2,'
1187
1185
1188 copies2 = {}
1186 copies2 = {}
1189 cp = _forwardcopies(base, c2)
1187 cp = _forwardcopies(base, c2)
1190 for dst, src in pycompat.iteritems(cp):
1188 for dst, src in cp.items():
1191 if src in m1:
1189 if src in m1:
1192 copies2[dst] = src
1190 copies2[dst] = src
1193
1191
@@ -1305,5 +1303,5 b' def graftcopies(wctx, ctx, base):'
1305 for dest, __ in list(new_copies.items()):
1303 for dest, __ in list(new_copies.items()):
1306 if dest in parent:
1304 if dest in parent:
1307 del new_copies[dest]
1305 del new_copies[dest]
1308 for dst, src in pycompat.iteritems(new_copies):
1306 for dst, src in new_copies.items():
1309 wctx[dst].markcopied(src)
1307 wctx[dst].markcopied(src)
@@ -8,7 +8,6 b''
8 # This code is based on the Mark Edgington's crecord extension.
8 # This code is based on the Mark Edgington's crecord extension.
9 # (Itself based on Bryan O'Sullivan's record extension.)
9 # (Itself based on Bryan O'Sullivan's record extension.)
10
10
11 from __future__ import absolute_import
12
11
13 import os
12 import os
14 import re
13 import re
@@ -83,7 +82,7 b' def checkcurses(ui):'
83 return curses and ui.interface(b"chunkselector") == b"curses"
82 return curses and ui.interface(b"chunkselector") == b"curses"
84
83
85
84
86 class patchnode(object):
85 class patchnode:
87 """abstract class for patch graph nodes
86 """abstract class for patch graph nodes
88 (i.e. patchroot, header, hunk, hunkline)
87 (i.e. patchroot, header, hunk, hunkline)
89 """
88 """
@@ -506,7 +505,7 b' class uihunk(patchnode):'
506 text = line.linetext
505 text = line.linetext
507 if line.linetext == diffhelper.MISSING_NEWLINE_MARKER:
506 if line.linetext == diffhelper.MISSING_NEWLINE_MARKER:
508 noeol = True
507 noeol = True
509 break
508 continue
510 if line.applied:
509 if line.applied:
511 if text.startswith(b'+'):
510 if text.startswith(b'+'):
512 dels.append(text[1:])
511 dels.append(text[1:])
@@ -602,7 +601,7 b' def testchunkselector(testfn, ui, header'
602 """
601 """
603 chunkselector = curseschunkselector(headerlist, ui, operation)
602 chunkselector = curseschunkselector(headerlist, ui, operation)
604
603
605 class dummystdscr(object):
604 class dummystdscr:
606 def clear(self):
605 def clear(self):
607 pass
606 pass
608
607
@@ -629,7 +628,7 b' def testchunkselector(testfn, ui, header'
629 }
628 }
630
629
631
630
632 class curseschunkselector(object):
631 class curseschunkselector:
633 def __init__(self, headerlist, ui, operation=None):
632 def __init__(self, headerlist, ui, operation=None):
634 # put the headers into a patch object
633 # put the headers into a patch object
635 self.headerlist = patch(headerlist)
634 self.headerlist = patch(headerlist)
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import heapq
9 import heapq
11
10
@@ -204,7 +203,7 b' def _genrevdescendants(repo, revs, follo'
204 def _builddescendantsmap(repo, startrev, followfirst):
203 def _builddescendantsmap(repo, startrev, followfirst):
205 """Build map of 'rev -> child revs', offset from startrev"""
204 """Build map of 'rev -> child revs', offset from startrev"""
206 cl = repo.changelog
205 cl = repo.changelog
207 descmap = [[] for _rev in pycompat.xrange(startrev, len(cl))]
206 descmap = [[] for _rev in range(startrev, len(cl))]
208 for currev in cl.revs(startrev + 1):
207 for currev in cl.revs(startrev + 1):
209 p1rev, p2rev = cl.parentrevs(currev)
208 p1rev, p2rev = cl.parentrevs(currev)
210 if p1rev >= startrev:
209 if p1rev >= startrev:
@@ -272,7 +271,7 b' def descendantrevs(revs, revsfn, parentr'
272 break
271 break
273
272
274
273
275 class subsetparentswalker(object):
274 class subsetparentswalker:
276 r"""Scan adjacent ancestors in the graph given by the subset
275 r"""Scan adjacent ancestors in the graph given by the subset
277
276
278 This computes parent-child relations in the sub graph filtered by
277 This computes parent-child relations in the sub graph filtered by
@@ -648,7 +647,7 b' def blockdescendants(fctx, fromline, tol'
648
647
649
648
650 @attr.s(slots=True, frozen=True)
649 @attr.s(slots=True, frozen=True)
651 class annotateline(object):
650 class annotateline:
652 fctx = attr.ib()
651 fctx = attr.ib()
653 lineno = attr.ib()
652 lineno = attr.ib()
654 # Whether this annotation was the result of a skip-annotate.
653 # Whether this annotation was the result of a skip-annotate.
@@ -657,7 +656,7 b' class annotateline(object):'
657
656
658
657
659 @attr.s(slots=True, frozen=True)
658 @attr.s(slots=True, frozen=True)
660 class _annotatedfile(object):
659 class _annotatedfile:
661 # list indexed by lineno - 1
660 # list indexed by lineno - 1
662 fctxs = attr.ib()
661 fctxs = attr.ib()
663 linenos = attr.ib()
662 linenos = attr.ib()
@@ -726,7 +725,7 b' def _annotatepair(parents, childfctx, ch'
726 for idx, (parent, blocks) in enumerate(pblocks):
725 for idx, (parent, blocks) in enumerate(pblocks):
727 for (a1, a2, b1, b2), _t in blocks:
726 for (a1, a2, b1, b2), _t in blocks:
728 if a2 - a1 >= b2 - b1:
727 if a2 - a1 >= b2 - b1:
729 for bk in pycompat.xrange(b1, b2):
728 for bk in range(b1, b2):
730 if child.fctxs[bk] == childfctx:
729 if child.fctxs[bk] == childfctx:
731 ak = min(a1 + (bk - b1), a2 - 1)
730 ak = min(a1 + (bk - b1), a2 - 1)
732 child.fctxs[bk] = parent.fctxs[ak]
731 child.fctxs[bk] = parent.fctxs[ak]
@@ -739,7 +738,7 b' def _annotatepair(parents, childfctx, ch'
739 # line.
738 # line.
740 for parent, blocks in remaining:
739 for parent, blocks in remaining:
741 for a1, a2, b1, b2 in blocks:
740 for a1, a2, b1, b2 in blocks:
742 for bk in pycompat.xrange(b1, b2):
741 for bk in range(b1, b2):
743 if child.fctxs[bk] == childfctx:
742 if child.fctxs[bk] == childfctx:
744 ak = min(a1 + (bk - b1), a2 - 1)
743 ak = min(a1 + (bk - b1), a2 - 1)
745 child.fctxs[bk] = parent.fctxs[ak]
744 child.fctxs[bk] = parent.fctxs[ak]
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import re
9 import re
11 import string
10 import string
@@ -229,7 +228,7 b' def parsedag(desc):'
229 c, digs = nextrun(nextch(), pycompat.bytestr(string.digits))
228 c, digs = nextrun(nextch(), pycompat.bytestr(string.digits))
230 # pytype: enable=wrong-arg-types
229 # pytype: enable=wrong-arg-types
231 n = int(digs)
230 n = int(digs)
232 for i in pycompat.xrange(0, n):
231 for i in range(0, n):
233 yield b'n', (r, [p1])
232 yield b'n', (r, [p1])
234 p1 = r
233 p1 = r
235 r += 1
234 r += 1
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import binascii
9 import binascii
11 import codecs
10 import codecs
@@ -74,6 +73,7 b' from . import ('
74 repoview,
73 repoview,
75 requirements,
74 requirements,
76 revlog,
75 revlog,
76 revlogutils,
77 revset,
77 revset,
78 revsetlang,
78 revsetlang,
79 scmutil,
79 scmutil,
@@ -104,6 +104,8 b' from .utils import ('
104 )
104 )
105
105
106 from .revlogutils import (
106 from .revlogutils import (
107 constants as revlog_constants,
108 debug as revlog_debug,
107 deltas as deltautil,
109 deltas as deltautil,
108 nodemap,
110 nodemap,
109 rewrite,
111 rewrite,
@@ -246,9 +248,7 b' def debugbuilddag('
246 if mergeable_file:
248 if mergeable_file:
247 linesperrev = 2
249 linesperrev = 2
248 # make a file with k lines per rev
250 # make a file with k lines per rev
249 initialmergedlines = [
251 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
250 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
251 ]
252 initialmergedlines.append(b"")
252 initialmergedlines.append(b"")
253
253
254 tags = []
254 tags = []
@@ -494,7 +494,7 b' def debugcapabilities(ui, path, **opts):'
494 b2caps = bundle2.bundle2caps(peer)
494 b2caps = bundle2.bundle2caps(peer)
495 if b2caps:
495 if b2caps:
496 ui.writenoi18n(b'Bundle2 capabilities:\n')
496 ui.writenoi18n(b'Bundle2 capabilities:\n')
497 for key, values in sorted(pycompat.iteritems(b2caps)):
497 for key, values in sorted(b2caps.items()):
498 ui.write(b' %s\n' % key)
498 ui.write(b' %s\n' % key)
499 for v in values:
499 for v in values:
500 ui.write(b' %s\n' % v)
500 ui.write(b' %s\n' % v)
@@ -758,10 +758,22 b' def debugdeltachain(ui, repo, file_=None'
758 Output can be templatized. Available template keywords are:
758 Output can be templatized. Available template keywords are:
759
759
760 :``rev``: revision number
760 :``rev``: revision number
761 :``p1``: parent 1 revision number (for reference)
762 :``p2``: parent 2 revision number (for reference)
761 :``chainid``: delta chain identifier (numbered by unique base)
763 :``chainid``: delta chain identifier (numbered by unique base)
762 :``chainlen``: delta chain length to this revision
764 :``chainlen``: delta chain length to this revision
763 :``prevrev``: previous revision in delta chain
765 :``prevrev``: previous revision in delta chain
764 :``deltatype``: role of delta / how it was computed
766 :``deltatype``: role of delta / how it was computed
767 - base: a full snapshot
768 - snap: an intermediate snapshot
769 - p1: a delta against the first parent
770 - p2: a delta against the second parent
771 - skip1: a delta against the same base as p1
772 (when p1 has an empty delta)
773 - skip2: a delta against the same base as p2
774 (when p2 has an empty delta)
775 - prev: a delta against the previous revision
776 - other: a delta against an arbitrary revision
765 :``compsize``: compressed size of revision
777 :``compsize``: compressed size of revision
766 :``uncompsize``: uncompressed size of revision
778 :``uncompsize``: uncompressed size of revision
767 :``chainsize``: total size of compressed revisions in chain
779 :``chainsize``: total size of compressed revisions in chain
@@ -795,25 +807,71 b' def debugdeltachain(ui, repo, file_=None'
795 generaldelta = r._generaldelta
807 generaldelta = r._generaldelta
796 withsparseread = getattr(r, '_withsparseread', False)
808 withsparseread = getattr(r, '_withsparseread', False)
797
809
810 # safety check to avoid crashing on corrupted revlogs
811 total_revs = len(index)
812
798 def revinfo(rev):
813 def revinfo(rev):
799 e = index[rev]
814 e = index[rev]
800 compsize = e[1]
815 compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
801 uncompsize = e[2]
816 uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
802 chainsize = 0
817 chainsize = 0
803
818
819 base = e[revlog_constants.ENTRY_DELTA_BASE]
820 p1 = e[revlog_constants.ENTRY_PARENT_1]
821 p2 = e[revlog_constants.ENTRY_PARENT_2]
822
823 # If a parent of a revision has an empty delta, we never try to delta
824 # against that parent, but directly against the delta base of that
825 # parent (recursively). This avoids adding a useless entry to the chain.
826 #
827 # However, we need to detect that as a special case for delta-type,
828 # so it is not simply reported as "other".
829 p1_base = p1
830 if p1 != nullrev and p1 < total_revs:
831 e1 = index[p1]
832 while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
833 new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
834 if (
835 new_base == p1_base
836 or new_base == nullrev
837 or new_base >= total_revs
838 ):
839 break
840 p1_base = new_base
841 e1 = index[p1_base]
842 p2_base = p2
843 if p2 != nullrev and p2 < total_revs:
844 e2 = index[p2]
845 while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
846 new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
847 if (
848 new_base == p2_base
849 or new_base == nullrev
850 or new_base >= total_revs
851 ):
852 break
853 p2_base = new_base
854 e2 = index[p2_base]
855
804 if generaldelta:
856 if generaldelta:
805 if e[3] == e[5]:
857 if base == p1:
806 deltatype = b'p1'
858 deltatype = b'p1'
807 elif e[3] == e[6]:
859 elif base == p2:
808 deltatype = b'p2'
860 deltatype = b'p2'
809 elif e[3] == rev - 1:
861 elif base == rev:
862 deltatype = b'base'
863 elif base == p1_base:
864 deltatype = b'skip1'
865 elif base == p2_base:
866 deltatype = b'skip2'
867 elif r.issnapshot(rev):
868 deltatype = b'snap'
869 elif base == rev - 1:
810 deltatype = b'prev'
870 deltatype = b'prev'
811 elif e[3] == rev:
812 deltatype = b'base'
813 else:
871 else:
814 deltatype = b'other'
872 deltatype = b'other'
815 else:
873 else:
816 if e[3] == rev:
874 if base == rev:
817 deltatype = b'base'
875 deltatype = b'base'
818 else:
876 else:
819 deltatype = b'prev'
877 deltatype = b'prev'
@@ -821,14 +879,14 b' def debugdeltachain(ui, repo, file_=None'
821 chain = r._deltachain(rev)[0]
879 chain = r._deltachain(rev)[0]
822 for iterrev in chain:
880 for iterrev in chain:
823 e = index[iterrev]
881 e = index[iterrev]
824 chainsize += e[1]
882 chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
825
883
826 return compsize, uncompsize, deltatype, chain, chainsize
884 return p1, p2, compsize, uncompsize, deltatype, chain, chainsize
827
885
828 fm = ui.formatter(b'debugdeltachain', opts)
886 fm = ui.formatter(b'debugdeltachain', opts)
829
887
830 fm.plain(
888 fm.plain(
831 b' rev chain# chainlen prev delta '
889 b' rev p1 p2 chain# chainlen prev delta '
832 b'size rawsize chainsize ratio lindist extradist '
890 b'size rawsize chainsize ratio lindist extradist '
833 b'extraratio'
891 b'extraratio'
834 )
892 )
@@ -838,7 +896,7 b' def debugdeltachain(ui, repo, file_=None'
838
896
839 chainbases = {}
897 chainbases = {}
840 for rev in r:
898 for rev in r:
841 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
899 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
842 chainbase = chain[0]
900 chainbase = chain[0]
843 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
901 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
844 basestart = start(chainbase)
902 basestart = start(chainbase)
@@ -862,11 +920,13 b' def debugdeltachain(ui, repo, file_=None'
862
920
863 fm.startitem()
921 fm.startitem()
864 fm.write(
922 fm.write(
865 b'rev chainid chainlen prevrev deltatype compsize '
923 b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
866 b'uncompsize chainsize chainratio lindist extradist '
924 b'uncompsize chainsize chainratio lindist extradist '
867 b'extraratio',
925 b'extraratio',
868 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
926 b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
869 rev,
927 rev,
928 p1,
929 p2,
870 chainid,
930 chainid,
871 len(chain),
931 len(chain),
872 prevrev,
932 prevrev,
@@ -929,6 +989,65 b' def debugdeltachain(ui, repo, file_=None'
929
989
930
990
931 @command(
991 @command(
992 b'debug-delta-find',
993 cmdutil.debugrevlogopts + cmdutil.formatteropts,
994 _(b'-c|-m|FILE REV'),
995 optionalrepo=True,
996 )
997 def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
998 """display the computation to get to a valid delta for storing REV
999
1000 This command will replay the process used to find the "best" delta to store
1001 a revision and display information about all the steps used to get to that
1002 result.
1003
1004 The revision uses the revision number of the target storage (not the
1005 changelog revision number).
1006
1007 note: the process is initiated from the full text of the revision to store.
1008 """
1009 opts = pycompat.byteskwargs(opts)
1010 if arg_2 is None:
1011 file_ = None
1012 rev = arg_1
1013 else:
1014 file_ = arg_1
1015 rev = arg_2
1016
1017 rev = int(rev)
1018
1019 revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
1020
1021 deltacomputer = deltautil.deltacomputer(
1022 revlog,
1023 write_debug=ui.write,
1024 debug_search=True,
1025 )
1026
1027 node = revlog.node(rev)
1028 p1r, p2r = revlog.parentrevs(rev)
1029 p1 = revlog.node(p1r)
1030 p2 = revlog.node(p2r)
1031 btext = [revlog.revision(rev)]
1032 textlen = len(btext[0])
1033 cachedelta = None
1034 flags = revlog.flags(rev)
1035
1036 revinfo = revlogutils.revisioninfo(
1037 node,
1038 p1,
1039 p2,
1040 btext,
1041 textlen,
1042 cachedelta,
1043 flags,
1044 )
1045
1046 fh = revlog._datafp()
1047 deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1048
1049
1050 @command(
932 b'debugdirstate|debugstate',
1051 b'debugdirstate|debugstate',
933 [
1052 [
934 (
1053 (
@@ -1018,6 +1137,22 b' def debugstate(ui, repo, **opts):'
1018
1137
1019
1138
1020 @command(
1139 @command(
1140 b'debugdirstateignorepatternshash',
1141 [],
1142 _(b''),
1143 )
1144 def debugdirstateignorepatternshash(ui, repo, **opts):
1145 """show the hash of ignore patterns stored in dirstate if v2,
1146 or nothing for dirstate-v1
1147 """
1148 if repo.dirstate._use_dirstate_v2:
1149 docket = repo.dirstate._map.docket
1150 hash_len = 20 # 160 bits for SHA-1
1151 hash_bytes = docket.tree_metadata[-hash_len:]
1152 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1153
1154
1155 @command(
1021 b'debugdiscovery',
1156 b'debugdiscovery',
1022 [
1157 [
1023 (b'', b'old', None, _(b'use old-style discovery')),
1158 (b'', b'old', None, _(b'use old-style discovery')),
@@ -1039,7 +1174,7 b' def debugstate(ui, repo, **opts):'
1039 b'',
1174 b'',
1040 b'remote-as-revs',
1175 b'remote-as-revs',
1041 b"",
1176 b"",
1042 b'use local as remote, with only these these revisions',
1177 b'use local as remote, with only these revisions',
1043 ),
1178 ),
1044 ]
1179 ]
1045 + cmdutil.remoteopts
1180 + cmdutil.remoteopts
@@ -1240,6 +1375,7 b' def debugdiscovery(ui, repo, remoteurl=b'
1240 # display discovery summary
1375 # display discovery summary
1241 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1376 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1242 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1377 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1378 fm.plain(b"queries: %(total-queries)9d\n" % data)
1243 fm.plain(b"heads summary:\n")
1379 fm.plain(b"heads summary:\n")
1244 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1380 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1245 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1381 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
@@ -1728,45 +1864,27 b' def debugignore(ui, repo, *files, **opts'
1728
1864
1729
1865
1730 @command(
1866 @command(
1731 b'debugindex',
1867 b'debug-revlog-index|debugindex',
1732 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1868 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1733 _(b'-c|-m|FILE'),
1869 _(b'-c|-m|FILE'),
1734 )
1870 )
1735 def debugindex(ui, repo, file_=None, **opts):
1871 def debugindex(ui, repo, file_=None, **opts):
1736 """dump index data for a storage primitive"""
1872 """dump index data for a revlog"""
1737 opts = pycompat.byteskwargs(opts)
1873 opts = pycompat.byteskwargs(opts)
1738 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1874 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1739
1875
1740 if ui.debugflag:
1741 shortfn = hex
1742 else:
1743 shortfn = short
1744
1745 idlen = 12
1746 for i in store:
1747 idlen = len(shortfn(store.node(i)))
1748 break
1749
1750 fm = ui.formatter(b'debugindex', opts)
1876 fm = ui.formatter(b'debugindex', opts)
1751 fm.plain(
1877
1752 b' rev linkrev %s %s p2\n'
1878 revlog = getattr(store, b'_revlog', store)
1753 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1879
1880 return revlog_debug.debug_index(
1881 ui,
1882 repo,
1883 formatter=fm,
1884 revlog=revlog,
1885 full_node=ui.debugflag,
1754 )
1886 )
1755
1887
1756 for rev in store:
1757 node = store.node(rev)
1758 parents = store.parents(node)
1759
1760 fm.startitem()
1761 fm.write(b'rev', b'%6d ', rev)
1762 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1763 fm.write(b'node', b'%s ', shortfn(node))
1764 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1765 fm.write(b'p2', b'%s', shortfn(parents[1]))
1766 fm.plain(b'\n')
1767
1768 fm.end()
1769
1770
1888
1771 @command(
1889 @command(
1772 b'debugindexdot',
1890 b'debugindexdot',
@@ -2185,7 +2303,19 b' def debuglocks(ui, repo, **opts):'
2185 except error.LockHeld:
2303 except error.LockHeld:
2186 raise error.Abort(_(b'lock is already held'))
2304 raise error.Abort(_(b'lock is already held'))
2187 if len(locks):
2305 if len(locks):
2188 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2306 try:
2307 if ui.interactive():
2308 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2309 ui.promptchoice(prompt)
2310 else:
2311 msg = b"%d locks held, waiting for signal\n"
2312 msg %= len(locks)
2313 ui.status(msg)
2314 while True: # XXX wait for a signal
2315 time.sleep(0.1)
2316 except KeyboardInterrupt:
2317 msg = b"signal-received releasing locks\n"
2318 ui.status(msg)
2189 return 0
2319 return 0
2190 finally:
2320 finally:
2191 release(*locks)
2321 release(*locks)
@@ -2220,9 +2350,8 b' def debuglocks(ui, repo, **opts):'
2220 )
2350 )
2221 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2351 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2222 return 1
2352 return 1
2223 except OSError as e:
2353 except FileNotFoundError:
2224 if e.errno != errno.ENOENT:
2354 pass
2225 raise
2226
2355
2227 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2356 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2228 return 0
2357 return 0
@@ -2403,11 +2532,11 b' def debugmergestate(ui, repo, *args, **o'
2403 fm_files.end()
2532 fm_files.end()
2404
2533
2405 fm_extras = fm.nested(b'extras')
2534 fm_extras = fm.nested(b'extras')
2406 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2535 for f, d in sorted(ms.allextras().items()):
2407 if f in ms:
2536 if f in ms:
2408 # If file is in mergestate, we have already processed its extras
2537 # If file is in mergestate, we have already processed its extras
2409 continue
2538 continue
2410 for k, v in pycompat.iteritems(d):
2539 for k, v in d.items():
2411 fm_extras.startitem()
2540 fm_extras.startitem()
2412 fm_extras.data(file=f)
2541 fm_extras.data(file=f)
2413 fm_extras.data(key=k)
2542 fm_extras.data(key=k)
@@ -2424,7 +2553,7 b' def debugnamecomplete(ui, repo, *args):'
2424 names = set()
2553 names = set()
2425 # since we previously only listed open branches, we will handle that
2554 # since we previously only listed open branches, we will handle that
2426 # specially (after this for loop)
2555 # specially (after this for loop)
2427 for name, ns in pycompat.iteritems(repo.names):
2556 for name, ns in repo.names.items():
2428 if name != b'branches':
2557 if name != b'branches':
2429 names.update(ns.listnames(repo))
2558 names.update(ns.listnames(repo))
2430 names.update(
2559 names.update(
@@ -2542,9 +2671,9 b' def debugobsolete(ui, repo, precursor=No'
2542 # local repository.
2671 # local repository.
2543 n = bin(s)
2672 n = bin(s)
2544 if len(n) != repo.nodeconstants.nodelen:
2673 if len(n) != repo.nodeconstants.nodelen:
2545 raise TypeError()
2674 raise ValueError
2546 return n
2675 return n
2547 except TypeError:
2676 except ValueError:
2548 raise error.InputError(
2677 raise error.InputError(
2549 b'changeset references must be full hexadecimal '
2678 b'changeset references must be full hexadecimal '
2550 b'node identifiers'
2679 b'node identifiers'
@@ -2674,7 +2803,7 b' def debugp1copies(ui, repo, **opts):'
2674 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2803 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2675 _(b'[-r REV]'),
2804 _(b'[-r REV]'),
2676 )
2805 )
2677 def debugp1copies(ui, repo, **opts):
2806 def debugp2copies(ui, repo, **opts):
2678 """dump copy information compared to p2"""
2807 """dump copy information compared to p2"""
2679
2808
2680 opts = pycompat.byteskwargs(opts)
2809 opts = pycompat.byteskwargs(opts)
@@ -2718,7 +2847,7 b' def debugpathcomplete(ui, repo, *specs, '
2718 fullpaths = opts['full']
2847 fullpaths = opts['full']
2719 files, dirs = set(), set()
2848 files, dirs = set(), set()
2720 adddir, addfile = dirs.add, files.add
2849 adddir, addfile = dirs.add, files.add
2721 for f, st in pycompat.iteritems(dirstate):
2850 for f, st in dirstate.items():
2722 if f.startswith(spec) and st.state in acceptable:
2851 if f.startswith(spec) and st.state in acceptable:
2723 if fixpaths:
2852 if fixpaths:
2724 f = f.replace(b'/', pycompat.ossep)
2853 f = f.replace(b'/', pycompat.ossep)
@@ -2907,7 +3036,7 b' def debugpushkey(ui, repopath, namespace'
2907 ui.status(pycompat.bytestr(r) + b'\n')
3036 ui.status(pycompat.bytestr(r) + b'\n')
2908 return not r
3037 return not r
2909 else:
3038 else:
2910 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
3039 for k, v in sorted(target.listkeys(namespace).items()):
2911 ui.write(
3040 ui.write(
2912 b"%s\t%s\n"
3041 b"%s\t%s\n"
2913 % (stringutil.escapestr(k), stringutil.escapestr(v))
3042 % (stringutil.escapestr(k), stringutil.escapestr(v))
@@ -3061,7 +3190,7 b' def debugrevlog(ui, repo, file_=None, **'
3061 ts = 0
3190 ts = 0
3062 heads = set()
3191 heads = set()
3063
3192
3064 for rev in pycompat.xrange(numrevs):
3193 for rev in range(numrevs):
3065 dbase = r.deltaparent(rev)
3194 dbase = r.deltaparent(rev)
3066 if dbase == -1:
3195 if dbase == -1:
3067 dbase = rev
3196 dbase = rev
@@ -3159,7 +3288,7 b' def debugrevlog(ui, repo, file_=None, **'
3159 l[2] += size
3288 l[2] += size
3160
3289
3161 numrevs = len(r)
3290 numrevs = len(r)
3162 for rev in pycompat.xrange(numrevs):
3291 for rev in range(numrevs):
3163 p1, p2 = r.parentrevs(rev)
3292 p1, p2 = r.parentrevs(rev)
3164 delta = r.deltaparent(rev)
3293 delta = r.deltaparent(rev)
3165 if format > 0:
3294 if format > 0:
@@ -4289,7 +4418,7 b' def debugwireargs(ui, repopath, *vals, *'
4289 for opt in cmdutil.remoteopts:
4418 for opt in cmdutil.remoteopts:
4290 del opts[opt[1]]
4419 del opts[opt[1]]
4291 args = {}
4420 args = {}
4292 for k, v in pycompat.iteritems(opts):
4421 for k, v in opts.items():
4293 if v:
4422 if v:
4294 args[k] = v
4423 args[k] = v
4295 args = pycompat.strkwargs(args)
4424 args = pycompat.strkwargs(args)
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from .i18n import _
9 from .i18n import _
11 from . import bookmarks, error, obsutil, scmutil, stack
10 from . import bookmarks, error, obsutil, scmutil, stack
@@ -5,13 +5,11 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 from .i18n import _
9 from .i18n import _
11
10
12 from . import (
11 from . import (
13 error,
12 error,
14 pycompat,
15 )
13 )
16
14
17 MISSING_NEWLINE_MARKER = b'\\ No newline at end of file\n'
15 MISSING_NEWLINE_MARKER = b'\\ No newline at end of file\n'
@@ -30,7 +28,7 b' def addlines(fp, hunk, lena, lenb, a, b)'
30 num = max(todoa, todob)
28 num = max(todoa, todob)
31 if num == 0:
29 if num == 0:
32 break
30 break
33 for i in pycompat.xrange(num):
31 for i in range(num):
34 s = fp.readline()
32 s = fp.readline()
35 if not s:
33 if not s:
36 raise error.ParseError(_(b'incomplete hunk'))
34 raise error.ParseError(_(b'incomplete hunk'))
@@ -77,7 +75,7 b' def testhunk(a, b, bstart):'
77 blen = len(b)
75 blen = len(b)
78 if alen > blen - bstart or bstart < 0:
76 if alen > blen - bstart or bstart < 0:
79 return False
77 return False
80 for i in pycompat.xrange(alen):
78 for i in range(alen):
81 if a[i][1:] != b[i + bstart]:
79 if a[i][1:] != b[i + bstart]:
82 return False
80 return False
83 return True
81 return True
@@ -7,7 +7,6 b''
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import
11
10
12 from .i18n import _
11 from .i18n import _
13
12
@@ -5,11 +5,9 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import collections
9 import collections
11 import contextlib
10 import contextlib
12 import errno
13 import os
11 import os
14 import stat
12 import stat
15 import uuid
13 import uuid
@@ -29,7 +27,6 b' from . import ('
29 policy,
27 policy,
30 pycompat,
28 pycompat,
31 scmutil,
29 scmutil,
32 sparse,
33 util,
30 util,
34 )
31 )
35
32
@@ -91,7 +88,7 b' def requires_no_parents_change(func):'
91
88
92
89
93 @interfaceutil.implementer(intdirstate.idirstate)
90 @interfaceutil.implementer(intdirstate.idirstate)
94 class dirstate(object):
91 class dirstate:
95 def __init__(
92 def __init__(
96 self,
93 self,
97 opener,
94 opener,
@@ -115,6 +112,7 b' class dirstate(object):'
115 self._opener = opener
112 self._opener = opener
116 self._validate = validate
113 self._validate = validate
117 self._root = root
114 self._root = root
115 # Either a function that builds a sparse matcher, or None if sparse is disabled
118 self._sparsematchfn = sparsematchfn
116 self._sparsematchfn = sparsematchfn
119 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
117 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
120 # UNC path pointing to root share (issue4557)
118 # UNC path pointing to root share (issue4557)
@@ -186,7 +184,11 b' class dirstate(object):'
186 The working directory may not include every file from a manifest. The
184 The working directory may not include every file from a manifest. The
187 matcher obtained by this property will match a path if it is to be
185 matcher obtained by this property will match a path if it is to be
188 included in the working directory.
186 included in the working directory.
187
188 When sparse is disabled, return None.
189 """
189 """
190 if self._sparsematchfn is None:
191 return None
190 # TODO there is potential to cache this property. For now, the matcher
192 # TODO there is potential to cache this property. For now, the matcher
191 # is resolved on every access. (But the called function does use a
193 # is resolved on every access. (But the called function does use a
192 # cache to keep the lookup fast.)
194 # cache to keep the lookup fast.)
@@ -196,9 +198,7 b' class dirstate(object):'
196 def _branch(self):
198 def _branch(self):
197 try:
199 try:
198 return self._opener.read(b"branch").strip() or b"default"
200 return self._opener.read(b"branch").strip() or b"default"
199 except IOError as inst:
201 except FileNotFoundError:
200 if inst.errno != errno.ENOENT:
201 raise
202 return b"default"
202 return b"default"
203
203
204 @property
204 @property
@@ -343,7 +343,7 b' class dirstate(object):'
343 return iter(sorted(self._map))
343 return iter(sorted(self._map))
344
344
345 def items(self):
345 def items(self):
346 return pycompat.iteritems(self._map)
346 return self._map.items()
347
347
348 iteritems = items
348 iteritems = items
349
349
@@ -427,6 +427,7 b' class dirstate(object):'
427 return
427 return
428 self._dirty = True
428 self._dirty = True
429 if source is not None:
429 if source is not None:
430 self._check_sparse(source)
430 self._map.copymap[dest] = source
431 self._map.copymap[dest] = source
431 else:
432 else:
432 self._map.copymap.pop(dest, None)
433 self._map.copymap.pop(dest, None)
@@ -588,6 +589,19 b' class dirstate(object):'
588 msg = _(b'file %r in dirstate clashes with %r')
589 msg = _(b'file %r in dirstate clashes with %r')
589 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
590 msg %= (pycompat.bytestr(d), pycompat.bytestr(filename))
590 raise error.Abort(msg)
591 raise error.Abort(msg)
592 self._check_sparse(filename)
593
594 def _check_sparse(self, filename):
595 """Check that a filename is inside the sparse profile"""
596 sparsematch = self._sparsematcher
597 if sparsematch is not None and not sparsematch.always():
598 if not sparsematch(filename):
599 msg = _(b"cannot add '%s' - it is outside the sparse checkout")
600 hint = _(
601 b'include file with `hg debugsparse --include <pattern>` or use '
602 b'`hg add -s <file>` to include file directory while adding'
603 )
604 raise error.Abort(msg % filename, hint=hint)
591
605
592 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
606 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
593 if exists is None:
607 if exists is None:
@@ -670,6 +684,20 b' class dirstate(object):'
670 self._dirty = True
684 self._dirty = True
671
685
672 def rebuild(self, parent, allfiles, changedfiles=None):
686 def rebuild(self, parent, allfiles, changedfiles=None):
687
688 matcher = self._sparsematcher
689 if matcher is not None and not matcher.always():
690 # should not add non-matching files
691 allfiles = [f for f in allfiles if matcher(f)]
692 if changedfiles:
693 changedfiles = [f for f in changedfiles if matcher(f)]
694
695 if changedfiles is not None:
696 # these files will be deleted from the dirstate when they are
697 # not found to be in allfiles
698 dirstatefilestoremove = {f for f in self if not matcher(f)}
699 changedfiles = dirstatefilestoremove.union(changedfiles)
700
673 if changedfiles is None:
701 if changedfiles is None:
674 # Rebuild entire dirstate
702 # Rebuild entire dirstate
675 to_lookup = allfiles
703 to_lookup = allfiles
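The sparse handling added to rebuild() above does two things: it drops out-of-profile files from allfiles, and it schedules currently-tracked out-of-profile files for removal by folding them into changedfiles. A toy walk-through of that set arithmetic (the file names and the matcher are invented):

    def sparse_rebuild_inputs(tracked, allfiles, changedfiles, matcher):
        """Mimic the filtering rebuild() applies when a sparse matcher is active."""
        allfiles = [f for f in allfiles if matcher(f)]
        if changedfiles:
            changedfiles = [f for f in changedfiles if matcher(f)]
        if changedfiles is not None:
            # tracked files that fell outside the profile must be dropped too
            to_remove = {f for f in tracked if not matcher(f)}
            changedfiles = to_remove.union(changedfiles)
        return allfiles, changedfiles

    matcher = lambda f: f.startswith('src/')
    allfiles, changed = sparse_rebuild_inputs(
        tracked={'src/a.py', 'docs/readme.txt'},
        allfiles=['src/a.py', 'docs/readme.txt'],
        changedfiles=['src/a.py', 'docs/readme.txt'],
        matcher=matcher,
    )
    assert allfiles == ['src/a.py']
    assert changed == {'docs/readme.txt', 'src/a.py'}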
@@ -771,9 +799,7 b' class dirstate(object):'
771 def _writedirstate(self, tr, st):
799 def _writedirstate(self, tr, st):
772 # notify callbacks about parents change
800 # notify callbacks about parents change
773 if self._origpl is not None and self._origpl != self._pl:
801 if self._origpl is not None and self._origpl != self._pl:
774 for c, callback in sorted(
802 for c, callback in sorted(self._plchangecallbacks.items()):
775 pycompat.iteritems(self._plchangecallbacks)
776 ):
777 callback(self, self._origpl, self._pl)
803 callback(self, self._origpl, self._pl)
778 self._origpl = None
804 self._origpl = None
779 self._map.write(tr, st)
805 self._map.write(tr, st)
@@ -936,7 +962,7 b' class dirstate(object):'
936 if match.isexact() and self._checkcase:
962 if match.isexact() and self._checkcase:
937 normed = {}
963 normed = {}
938
964
939 for f, st in pycompat.iteritems(results):
965 for f, st in results.items():
940 if st is None:
966 if st is None:
941 continue
967 continue
942
968
@@ -949,7 +975,7 b' class dirstate(object):'
949
975
950 paths.add(f)
976 paths.add(f)
951
977
952 for norm, paths in pycompat.iteritems(normed):
978 for norm, paths in normed.items():
953 if len(paths) > 1:
979 if len(paths) > 1:
954 for path in paths:
980 for path in paths:
955 folded = self._discoverpath(
981 folded = self._discoverpath(
@@ -986,6 +1012,11 b' class dirstate(object):'
986 ignore = util.always
1012 ignore = util.always
987 dirignore = util.always
1013 dirignore = util.always
988
1014
1015 if self._sparsematchfn is not None:
1016 em = matchmod.exact(match.files())
1017 sm = matchmod.unionmatcher([self._sparsematcher, em])
1018 match = matchmod.intersectmatchers(match, sm)
1019
989 matchfn = match.matchfn
1020 matchfn = match.matchfn
990 matchalways = match.always()
1021 matchalways = match.always()
991 matchtdir = match.traversedir
1022 matchtdir = match.traversedir
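The matcher narrowing added to walk() (and to _rust_status later in this diff) intersects the caller's matcher with the union of the sparse matcher and an exact matcher over the explicitly named files, so explicitly requested paths stay visible even outside the sparse profile. A rough model with plain predicates standing in for Mercurial's matcher objects:

    def union(*preds):
        return lambda f: any(p(f) for p in preds)

    def intersect(*preds):
        return lambda f: all(p(f) for p in preds)

    user_match = lambda f: f.endswith('.py')        # what the caller asked for
    sparse = lambda f: f.startswith('src/')         # sparse profile
    explicit = lambda f: f in {'tools/gen.py'}      # stands in for match.files()

    effective = intersect(user_match, union(sparse, explicit))

    assert effective('src/a.py')        # inside the profile
    assert effective('tools/gen.py')    # outside, but explicitly named
    assert not effective('tools/other.py')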
@@ -1040,13 +1071,11 b' class dirstate(object):'
1040 try:
1071 try:
1041 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1072 with tracing.log('dirstate.walk.traverse listdir %s', nd):
1042 entries = listdir(join(nd), stat=True, skip=skip)
1073 entries = listdir(join(nd), stat=True, skip=skip)
1043 except OSError as inst:
1074 except (PermissionError, FileNotFoundError) as inst:
1044 if inst.errno in (errno.EACCES, errno.ENOENT):
1075 match.bad(
1045 match.bad(
1076 self.pathto(nd), encoding.strtolocal(inst.strerror)
1046 self.pathto(nd), encoding.strtolocal(inst.strerror)
1077 )
1047 )
1078 continue
1048 continue
1049 raise
1050 for f, kind, st in entries:
1079 for f, kind, st in entries:
1051 # Some matchers may return files in the visitentries set,
1080 # Some matchers may return files in the visitentries set,
1052 # instead of 'this', if the matcher explicitly mentions them
1081 # instead of 'this', if the matcher explicitly mentions them
@@ -1149,6 +1178,10 b' class dirstate(object):'
1149 return results
1178 return results
1150
1179
1151 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1180 def _rust_status(self, matcher, list_clean, list_ignored, list_unknown):
1181 if self._sparsematchfn is not None:
1182 em = matchmod.exact(matcher.files())
1183 sm = matchmod.unionmatcher([self._sparsematcher, em])
1184 matcher = matchmod.intersectmatchers(matcher, sm)
1152 # Force Rayon (Rust parallelism library) to respect the number of
1185 # Force Rayon (Rust parallelism library) to respect the number of
1153 # workers. This is a temporary workaround until Rust code knows
1186 # workers. This is a temporary workaround until Rust code knows
1154 # how to read the config file.
1187 # how to read the config file.
@@ -1255,6 +1288,9 b' class dirstate(object):'
1255 matchmod.alwaysmatcher,
1288 matchmod.alwaysmatcher,
1256 matchmod.exactmatcher,
1289 matchmod.exactmatcher,
1257 matchmod.includematcher,
1290 matchmod.includematcher,
1291 matchmod.intersectionmatcher,
1292 matchmod.nevermatcher,
1293 matchmod.unionmatcher,
1258 )
1294 )
1259
1295
1260 if rustmod is None:
1296 if rustmod is None:
@@ -1264,8 +1300,6 b' class dirstate(object):'
1264 use_rust = False
1300 use_rust = False
1265 elif subrepos:
1301 elif subrepos:
1266 use_rust = False
1302 use_rust = False
1267 elif sparse.enabled:
1268 use_rust = False
1269 elif not isinstance(match, allowed_matchers):
1303 elif not isinstance(match, allowed_matchers):
1270 # Some matchers have yet to be implemented
1304 # Some matchers have yet to be implemented
1271 use_rust = False
1305 use_rust = False
@@ -1311,9 +1345,9 b' class dirstate(object):'
1311 # - match.traversedir does something, because match.traversedir should
1345 # - match.traversedir does something, because match.traversedir should
1312 # be called for every dir in the working dir
1346 # be called for every dir in the working dir
1313 full = listclean or match.traversedir is not None
1347 full = listclean or match.traversedir is not None
1314 for fn, st in pycompat.iteritems(
1348 for fn, st in self.walk(
1315 self.walk(match, subrepos, listunknown, listignored, full=full)
1349 match, subrepos, listunknown, listignored, full=full
1316 ):
1350 ).items():
1317 if not dcontains(fn):
1351 if not dcontains(fn):
1318 if (listignored or mexact(fn)) and dirignore(fn):
1352 if (listignored or mexact(fn)) and dirignore(fn):
1319 if listignored:
1353 if listignored:
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import os
9 import os
11 from .i18n import _
10 from .i18n import _
@@ -3,9 +3,6 b''
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
7
8 import errno
9
6
10 from .i18n import _
7 from .i18n import _
11
8
@@ -13,7 +10,6 b' from . import ('
13 error,
10 error,
14 pathutil,
11 pathutil,
15 policy,
12 policy,
16 pycompat,
17 txnutil,
13 txnutil,
18 util,
14 util,
19 )
15 )
@@ -36,7 +32,7 b' else:'
36 rangemask = 0x7FFFFFFF
32 rangemask = 0x7FFFFFFF
37
33
38
34
39 class _dirstatemapcommon(object):
35 class _dirstatemapcommon:
40 """
36 """
41 Methods that are identical for both implementations of the dirstatemap
37 Methods that are identical for both implementations of the dirstatemap
42 class, with and without Rust extensions enabled.
38 class, with and without Rust extensions enabled.
@@ -81,134 +77,6 b' class _dirstatemapcommon(object):'
81 def __getitem__(self, item):
77 def __getitem__(self, item):
82 return self._map[item]
78 return self._map[item]
83
79
84 ### sub-class utility method
85 #
86 # Use to allow for generic implementation of some method while still coping
87 # with minor difference between implementation.
88
89 def _dirs_incr(self, filename, old_entry=None):
90 """incremente the dirstate counter if applicable
91
92 This might be a no-op for some subclass who deal with directory
93 tracking in a different way.
94 """
95
96 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
97 """decremente the dirstate counter if applicable
98
99 This might be a no-op for some subclass who deal with directory
100 tracking in a different way.
101 """
102
103 def _refresh_entry(self, f, entry):
104 """record updated state of an entry"""
105
106 def _insert_entry(self, f, entry):
107 """add a new dirstate entry (or replace an unrelated one)
108
109 The fact it is actually new is the responsability of the caller
110 """
111
112 def _drop_entry(self, f):
113 """remove any entry for file f
114
115 This should also drop associated copy information
116
117 The fact we actually need to drop it is the responsability of the caller"""
118
119 ### method to manipulate the entries
120
121 def set_possibly_dirty(self, filename):
122 """record that the current state of the file on disk is unknown"""
123 entry = self[filename]
124 entry.set_possibly_dirty()
125 self._refresh_entry(filename, entry)
126
127 def set_clean(self, filename, mode, size, mtime):
128 """mark a file as back to a clean state"""
129 entry = self[filename]
130 size = size & rangemask
131 entry.set_clean(mode, size, mtime)
132 self._refresh_entry(filename, entry)
133 self.copymap.pop(filename, None)
134
135 def set_tracked(self, filename):
136 new = False
137 entry = self.get(filename)
138 if entry is None:
139 self._dirs_incr(filename)
140 entry = DirstateItem(
141 wc_tracked=True,
142 )
143
144 self._insert_entry(filename, entry)
145 new = True
146 elif not entry.tracked:
147 self._dirs_incr(filename, entry)
148 entry.set_tracked()
149 self._refresh_entry(filename, entry)
150 new = True
151 else:
152 # XXX This is probably overkill for more case, but we need this to
153 # fully replace the `normallookup` call with `set_tracked` one.
154 # Consider smoothing this in the future.
155 entry.set_possibly_dirty()
156 self._refresh_entry(filename, entry)
157 return new
158
159 def set_untracked(self, f):
160 """Mark a file as no longer tracked in the dirstate map"""
161 entry = self.get(f)
162 if entry is None:
163 return False
164 else:
165 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
166 if not entry.p2_info:
167 self.copymap.pop(f, None)
168 entry.set_untracked()
169 self._refresh_entry(f, entry)
170 return True
171
172 def reset_state(
173 self,
174 filename,
175 wc_tracked=False,
176 p1_tracked=False,
177 p2_info=False,
178 has_meaningful_mtime=True,
179 has_meaningful_data=True,
180 parentfiledata=None,
181 ):
182 """Set a entry to a given state, diregarding all previous state
183
184 This is to be used by the part of the dirstate API dedicated to
185 adjusting the dirstate after a update/merge.
186
187 note: calling this might result to no entry existing at all if the
188 dirstate map does not see any point at having one for this file
189 anymore.
190 """
191 # copy information are now outdated
192 # (maybe new information should be in directly passed to this function)
193 self.copymap.pop(filename, None)
194
195 if not (p1_tracked or p2_info or wc_tracked):
196 old_entry = self._map.get(filename)
197 self._drop_entry(filename)
198 self._dirs_decr(filename, old_entry=old_entry)
199 return
200
201 old_entry = self._map.get(filename)
202 self._dirs_incr(filename, old_entry)
203 entry = DirstateItem(
204 wc_tracked=wc_tracked,
205 p1_tracked=p1_tracked,
206 p2_info=p2_info,
207 has_meaningful_mtime=has_meaningful_mtime,
208 parentfiledata=parentfiledata,
209 )
210 self._insert_entry(filename, entry)
211
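reset_state() is the "start from scratch" entry point: copy information is always invalidated, and if none of wc_tracked/p1_tracked/p2_info remains set the entry disappears entirely. A self-contained toy sketch of that branch (plain dicts standing in for the real map and copymap):

    def reset_state(dmap, copymap, filename,
                    wc_tracked=False, p1_tracked=False, p2_info=False):
        copymap.pop(filename, None)            # copy info is always outdated
        if not (wc_tracked or p1_tracked or p2_info):
            dmap.pop(filename, None)           # nothing tracks the file: drop it
            return
        dmap[filename] = {'wc_tracked': wc_tracked,
                          'p1_tracked': p1_tracked,
                          'p2_info': p2_info}

    dmap = {b'gone.txt': {'wc_tracked': True}}
    copymap = {b'gone.txt': b'old.txt'}
    reset_state(dmap, copymap, b'gone.txt')    # no flags set: entry dropped
    assert dmap == {} and copymap == {}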
212 ### disk interaction
80 ### disk interaction
213
81
214 def _opendirstatefile(self):
82 def _opendirstatefile(self):
@@ -225,9 +93,7 b' class _dirstatemapcommon(object):'
225 try:
93 try:
226 with self._opendirstatefile() as fp:
94 with self._opendirstatefile() as fp:
227 return fp.read(size)
95 return fp.read(size)
228 except IOError as err:
96 except FileNotFoundError:
229 if err.errno != errno.ENOENT:
230 raise
231 # File doesn't exist, so the current state is empty
97 # File doesn't exist, so the current state is empty
232 return b''
98 return b''
233
99
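This hunk replaces the Python 2 era pattern of catching IOError and re-checking errno with Python 3's FileNotFoundError, which is the OSError subclass raised for ENOENT. The two spellings are equivalent on Python 3; a generic, self-contained comparison:

    import errno

    # Old, Python-2-compatible spelling: re-raise anything that is not ENOENT.
    def read_or_empty_old(path):
        try:
            with open(path, 'rb') as fp:
                return fp.read()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return b''

    # Python-3-only spelling used after this change.
    def read_or_empty_new(path):
        try:
            with open(path, 'rb') as fp:
                return fp.read()
        except FileNotFoundError:
            return b''

    assert read_or_empty_old('no-such-file') == read_or_empty_new('no-such-file') == b''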
@@ -355,7 +221,7 b' class dirstatemap(_dirstatemapcommon):'
355 util.clearcachedproperty(self, b"dirfoldmap")
221 util.clearcachedproperty(self, b"dirfoldmap")
356
222
357 def items(self):
223 def items(self):
358 return pycompat.iteritems(self._map)
224 return self._map.items()
359
225
360 # forward for python2,3 compat
226 # forward for python2,3 compat
361 iteritems = items
227 iteritems = items
@@ -379,7 +245,7 b' class dirstatemap(_dirstatemapcommon):'
379 self._dirtyparents = True
245 self._dirtyparents = True
380 copies = {}
246 copies = {}
381 if fold_p2:
247 if fold_p2:
382 for f, s in pycompat.iteritems(self._map):
248 for f, s in self._map.items():
383 # Discard "merged" markers when moving away from a merge state
249 # Discard "merged" markers when moving away from a merge state
384 if s.p2_info:
250 if s.p2_info:
385 source = self.copymap.pop(f, None)
251 source = self.copymap.pop(f, None)
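Several hunks in this changeset make the same mechanical substitution: pycompat.iteritems(d) becomes d.items(), which is safe now that only Python 3 is supported (dict.items() already returns a lazy view there). A short sketch of the equivalence (the shim shown is an approximation, not the real pycompat code):

    d = {b'file-a': 1, b'file-b': 2}

    # roughly what pycompat.iteritems() provided on Python 3
    def iteritems(mapping):
        return iter(mapping.items())

    assert sorted(iteritems(d)) == sorted(d.items()) == [(b'file-a', 1), (b'file-b', 2)]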
@@ -465,7 +331,7 b' class dirstatemap(_dirstatemapcommon):'
465 # (e.g. "has_dir")
331 # (e.g. "has_dir")
466
332
467 def _dirs_incr(self, filename, old_entry=None):
333 def _dirs_incr(self, filename, old_entry=None):
468 """incremente the dirstate counter if applicable"""
334 """increment the dirstate counter if applicable"""
469 if (
335 if (
470 old_entry is None or old_entry.removed
336 old_entry is None or old_entry.removed
471 ) and "_dirs" in self.__dict__:
337 ) and "_dirs" in self.__dict__:
@@ -474,7 +340,7 b' class dirstatemap(_dirstatemapcommon):'
474 self._alldirs.addpath(filename)
340 self._alldirs.addpath(filename)
475
341
476 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
342 def _dirs_decr(self, filename, old_entry=None, remove_variant=False):
477 """decremente the dirstate counter if applicable"""
343 """decrement the dirstate counter if applicable"""
478 if old_entry is not None:
344 if old_entry is not None:
479 if "_dirs" in self.__dict__ and not old_entry.removed:
345 if "_dirs" in self.__dict__ and not old_entry.removed:
480 self._dirs.delpath(filename)
346 self._dirs.delpath(filename)
@@ -502,7 +368,7 b' class dirstatemap(_dirstatemapcommon):'
502
368
503 f = {}
369 f = {}
504 normcase = util.normcase
370 normcase = util.normcase
505 for name, s in pycompat.iteritems(self._map):
371 for name, s in self._map.items():
506 if not s.removed:
372 if not s.removed:
507 f[normcase(name)] = name
373 f[normcase(name)] = name
508 f[b'.'] = b'.' # prevents useless util.fspath() invocation
374 f[b'.'] = b'.' # prevents useless util.fspath() invocation
@@ -540,14 +406,107 b' class dirstatemap(_dirstatemapcommon):'
540
406
541 ### code related to manipulation of entries and copy-sources
407 ### code related to manipulation of entries and copy-sources
542
408
409 def reset_state(
410 self,
411 filename,
412 wc_tracked=False,
413 p1_tracked=False,
414 p2_info=False,
415 has_meaningful_mtime=True,
416 parentfiledata=None,
417 ):
418 """Set a entry to a given state, diregarding all previous state
419
420 This is to be used by the part of the dirstate API dedicated to
421 adjusting the dirstate after a update/merge.
422
423 note: calling this might result to no entry existing at all if the
424 dirstate map does not see any point at having one for this file
425 anymore.
426 """
427 # copy information are now outdated
428 # (maybe new information should be in directly passed to this function)
429 self.copymap.pop(filename, None)
430
431 if not (p1_tracked or p2_info or wc_tracked):
432 old_entry = self._map.get(filename)
433 self._drop_entry(filename)
434 self._dirs_decr(filename, old_entry=old_entry)
435 return
436
437 old_entry = self._map.get(filename)
438 self._dirs_incr(filename, old_entry)
439 entry = DirstateItem(
440 wc_tracked=wc_tracked,
441 p1_tracked=p1_tracked,
442 p2_info=p2_info,
443 has_meaningful_mtime=has_meaningful_mtime,
444 parentfiledata=parentfiledata,
445 )
446 self._map[filename] = entry
447
448 def set_tracked(self, filename):
449 new = False
450 entry = self.get(filename)
451 if entry is None:
452 self._dirs_incr(filename)
453 entry = DirstateItem(
454 wc_tracked=True,
455 )
456
457 self._map[filename] = entry
458 new = True
459 elif not entry.tracked:
460 self._dirs_incr(filename, entry)
461 entry.set_tracked()
462 self._refresh_entry(filename, entry)
463 new = True
464 else:
465 # XXX This is probably overkill for most cases, but we need this to
466 # fully replace the `normallookup` call with the `set_tracked` one.
467 # Consider smoothing this in the future.
468 entry.set_possibly_dirty()
469 self._refresh_entry(filename, entry)
470 return new
471
472 def set_untracked(self, f):
473 """Mark a file as no longer tracked in the dirstate map"""
474 entry = self.get(f)
475 if entry is None:
476 return False
477 else:
478 self._dirs_decr(f, old_entry=entry, remove_variant=not entry.added)
479 if not entry.p2_info:
480 self.copymap.pop(f, None)
481 entry.set_untracked()
482 self._refresh_entry(f, entry)
483 return True
484
485 def set_clean(self, filename, mode, size, mtime):
486 """mark a file as back to a clean state"""
487 entry = self[filename]
488 size = size & rangemask
489 entry.set_clean(mode, size, mtime)
490 self._refresh_entry(filename, entry)
491 self.copymap.pop(filename, None)
492
493 def set_possibly_dirty(self, filename):
494 """record that the current state of the file on disk is unknown"""
495 entry = self[filename]
496 entry.set_possibly_dirty()
497 self._refresh_entry(filename, entry)
498
543 def _refresh_entry(self, f, entry):
499 def _refresh_entry(self, f, entry):
500 """record updated state of an entry"""
544 if not entry.any_tracked:
501 if not entry.any_tracked:
545 self._map.pop(f, None)
502 self._map.pop(f, None)
546
503
547 def _insert_entry(self, f, entry):
504 def _drop_entry(self, f):
548 self._map[f] = entry
505 """remove any entry for file f
549
506
550 def _drop_entry(self, f):
507 This should also drop associated copy information
508
509 The fact we actually need to drop it is the responsability of the caller"""
551 self._map.pop(f, None)
510 self._map.pop(f, None)
552 self.copymap.pop(f, None)
511 self.copymap.pop(f, None)
553
512
@@ -630,22 +589,7 b' if rustmod is not None:'
630 self._dirtyparents = True
589 self._dirtyparents = True
631 copies = {}
590 copies = {}
632 if fold_p2:
591 if fold_p2:
633 # Collect into an intermediate list to avoid a `RuntimeError`
592 copies = self._map.setparents_fixup()
634 # exception due to mutation during iteration.
635 # TODO: move this the whole loop to Rust where `iter_mut`
636 # enables in-place mutation of elements of a collection while
637 # iterating it, without mutating the collection itself.
638 files_with_p2_info = [
639 f for f, s in self._map.items() if s.p2_info
640 ]
641 rust_map = self._map
642 for f in files_with_p2_info:
643 e = rust_map.get(f)
644 source = self.copymap.pop(f, None)
645 if source:
646 copies[f] = source
647 e.drop_merge_data()
648 rust_map.set_dirstate_item(f, e)
649 return copies
593 return copies
650
594
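The deleted comment explains why the old Python loop first collected the affected filenames into a list: mutating the map while iterating it raises a RuntimeError, a concern that disappears once the whole fixup moves into the Rust-side setparents_fixup(). The same pitfall can be shown with a plain dict (toy data, not dirstate code):

    entries = {b'a': {'p2_info': True}, b'b': {'p2_info': False}}
    copymap = {b'a': b'a.orig'}

    # for f in entries:            # deleting inside this loop would raise
    #     del entries[f]           # RuntimeError: dict changed size during iteration

    files_with_p2_info = [f for f, e in entries.items() if e['p2_info']]
    copies = {}
    for f in files_with_p2_info:   # safe: iteration over `entries` is finished
        source = copymap.pop(f, None)
        if source:
            copies[f] = source
        entries[f] = {'p2_info': False}

    assert copies == {b'a': b'a.orig'}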
651 ### disk interaction
595 ### disk interaction
@@ -715,18 +659,32 b' if rustmod is not None:'
715
659
716 ### code related to manipulation of entries and copy-sources
660 ### code related to manipulation of entries and copy-sources
717
661
718 def _refresh_entry(self, f, entry):
662 def set_tracked(self, f):
719 if not entry.any_tracked:
663 return self._map.set_tracked(f)
720 self._map.drop_item_and_copy_source(f)
664
721 else:
665 def set_untracked(self, f):
722 self._map.addfile(f, entry)
666 return self._map.set_untracked(f)
667
668 def set_clean(self, filename, mode, size, mtime):
669 self._map.set_clean(filename, mode, size, mtime)
670
671 def set_possibly_dirty(self, f):
672 self._map.set_possibly_dirty(f)
723
673
724 def _insert_entry(self, f, entry):
674 def reset_state(
725 self._map.addfile(f, entry)
675 self,
726
676 filename,
727 def _drop_entry(self, f):
677 wc_tracked=False,
728 self._map.drop_item_and_copy_source(f)
678 p1_tracked=False,
729
679 p2_info=False,
730 def __setitem__(self, key, value):
680 has_meaningful_mtime=True,
731 assert isinstance(value, DirstateItem)
681 parentfiledata=None,
732 self._map.set_dirstate_item(key, value)
682 ):
683 return self._map.reset_state(
684 filename,
685 wc_tracked,
686 p1_tracked,
687 p2_info,
688 has_meaningful_mtime,
689 parentfiledata,
690 )
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import struct
9 import struct
11
10
@@ -29,7 +28,7 b' HEADER = struct.Struct('
29 )
28 )
30
29
31
30
32 class DirstateDocket(object):
31 class DirstateDocket:
33 data_filename_pattern = b'dirstate.%s'
32 data_filename_pattern = b'dirstate.%s'
34
33
35 def __init__(self, parents, data_size, tree_metadata, uuid):
34 def __init__(self, parents, data_size, tree_metadata, uuid):
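Two mechanical cleanups repeat across the remaining files: the `from __future__ import absolute_import` lines are deleted and `class Foo(object):` becomes `class Foo:`. Both are no-ops on Python 3, where every class is new-style and absolute imports are the default; a quick self-contained check:

    class WithBase(object):
        pass

    class WithoutBase:
        pass

    # both spellings yield the same new-style class rooted at `object`
    assert WithBase.__mro__[-1] is object
    assert WithoutBase.__mro__[-1] is object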
@@ -3,7 +3,6 b''
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 from __future__ import absolute_import
7
6
8 import functools
7 import functools
9 import os
8 import os
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import struct
9 import struct
11
10
@@ -126,7 +125,7 b' def slice_with_len(data, start, len):'
126
125
127
126
128 @attr.s
127 @attr.s
129 class Node(object):
128 class Node:
130 path = attr.ib()
129 path = attr.ib()
131 entry = attr.ib()
130 entry = attr.ib()
132 parent = attr.ib(default=None)
131 parent = attr.ib(default=None)
@@ -5,7 +5,6 b''
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
9
8
10 import functools
9 import functools
11
10
@@ -74,7 +73,7 b' def findcommonincoming(repo, remote, hea'
74 return (list(common), anyinc, heads or list(srvheads))
73 return (list(common), anyinc, heads or list(srvheads))
75
74
76
75
77 class outgoing(object):
76 class outgoing:
78 """Represents the result of a findcommonoutgoing() call.
77 """Represents the result of a findcommonoutgoing() call.
79
78
80 Members:
79 Members:
@@ -238,7 +237,7 b' def _headssummary(pushop):'
238
237
239 knownnode = cl.hasnode # do not use nodemap until it is filtered
238 knownnode = cl.hasnode # do not use nodemap until it is filtered
240 # A. register remote heads of branches which are in outgoing set
239 # A. register remote heads of branches which are in outgoing set
241 for branch, heads in pycompat.iteritems(remotemap):
240 for branch, heads in remotemap.items():
242 # don't add head info about branches which we don't have locally
241 # don't add head info about branches which we don't have locally
243 if branch not in branches:
242 if branch not in branches:
244 continue
243 continue
@@ -262,14 +261,14 b' def _headssummary(pushop):'
262 repo,
261 repo,
263 (
262 (
264 (branch, heads[1])
263 (branch, heads[1])
265 for branch, heads in pycompat.iteritems(headssum)
264 for branch, heads in headssum.items()
266 if heads[0] is not None
265 if heads[0] is not None
267 ),
266 ),
268 )
267 )
269 newmap.update(repo, (ctx.rev() for ctx in missingctx))
268 newmap.update(repo, (ctx.rev() for ctx in missingctx))
270 for branch, newheads in pycompat.iteritems(newmap):
269 for branch, newheads in newmap.items():
271 headssum[branch][1][:] = newheads
270 headssum[branch][1][:] = newheads
272 for branch, items in pycompat.iteritems(headssum):
271 for branch, items in headssum.items():
273 for l in items:
272 for l in items:
274 if l is not None:
273 if l is not None:
275 l.sort()
274 l.sort()
@@ -380,9 +379,7 b' def checkheads(pushop):'
380 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
379 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
381 pushop.pushbranchmap = headssum
380 pushop.pushbranchmap = headssum
382 newbranches = [
381 newbranches = [
383 branch
382 branch for branch, heads in headssum.items() if heads[0] is None
384 for branch, heads in pycompat.iteritems(headssum)
385 if heads[0] is None
386 ]
383 ]
387 # 1. Check for new branches on the remote.
384 # 1. Check for new branches on the remote.
388 if newbranches and not newbranch: # new branch requires --new-branch
385 if newbranches and not newbranch: # new branch requires --new-branch
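In the checkheads() hunk the new-branch detection collapses into a single comprehension over headssum.items(). As a hedged, self-contained sketch (the tuple layout below follows the branch -> (remoteheads, newheads, unsyncedheads, discardedheads) convention described in discovery.py; the data is made up), a branch whose first slot is None is one the remote has never seen:

    headssum = {
        b'default': ([b'abc123'], [b'abc123'], [], []),
        b'feature-x': (None, [b'def456'], [], []),
    }

    newbranches = [
        branch for branch, heads in headssum.items() if heads[0] is None
    ]
    assert newbranches == [b'feature-x']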
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff