##// END OF EJS Templates
release: merge default into stable for 4.7 release freeze
Augie Fackler -
r38762:7acec940 merge stable
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,27 b''
1 #include "fuzzutil.h"
2
3 #include <cstring>
4 #include <utility>
5
6 contrib::optional<two_inputs> SplitInputs(const uint8_t *Data, size_t Size)
7 {
8 if (!Size) {
9 return contrib::nullopt;
10 }
11 // figure out a random point in [0, Size] to split our input.
12 size_t left_size = (Data[0] / 255.0) * (Size - 1);
13
14 // Copy inputs to new allocations so if bdiff over-reads
15 // AddressSanitizer can detect it.
16 std::unique_ptr<char[]> left(new char[left_size]);
17 std::memcpy(left.get(), Data + 1, left_size);
18 // right starts at the next byte after left ends
19 size_t right_size = Size - (left_size + 1);
20 std::unique_ptr<char[]> right(new char[right_size]);
21 std::memcpy(right.get(), Data + 1 + left_size, right_size);
22 LOG(2) << "inputs are " << left_size << " and " << right_size
23 << " bytes" << std::endl;
24 two_inputs result = {std::move(right), right_size, std::move(left),
25 left_size};
26 return result;
27 }
@@ -0,0 +1,47 b''
#ifndef CONTRIB_FUZZ_FUZZUTIL_H
#define CONTRIB_FUZZ_FUZZUTIL_H
#include <iostream>
#include <memory>
#include <stddef.h> /* size_t, used by two_inputs below */
#include <stdint.h>

/* Try and use std::optional, but failing that assume we'll have a
 * workable https://abseil.io/ install on the include path to get
 * their backport of std::optional. */
#ifdef __has_include
#if __has_include(<optional>) && __cplusplus >= 201703L
#include <optional>
#define CONTRIB_FUZZ_HAVE_STD_OPTIONAL
#endif
#endif
#ifdef CONTRIB_FUZZ_HAVE_STD_OPTIONAL
namespace contrib
{
using std::nullopt;
using std::optional;
} /* namespace contrib */
#else
#include "third_party/absl/types/optional.h"
namespace contrib
{
using absl::nullopt;
using absl::optional;
} /* namespace contrib */
#endif

/* set DEBUG to 1 for a few debugging prints, or 2 for a lot */
#define DEBUG 0
/* Stream a log message at a given verbosity: LOG(2) << "msg" << std::endl;
 * BUG FIX: the previous `if (level <= DEBUG) std::cout` form was a
 * dangling-else hazard — a user-written `else` following a LOG(...)
 * statement would silently bind to the macro's `if`. The inverted
 * if/else below makes that a compile error instead, and `level` is
 * parenthesized so expression arguments expand safely. */
#define LOG(level)                                                             \
	if ((level) > DEBUG) {                                                 \
	} else                                                                 \
		std::cout

/* Owning copies of the two halves of a split fuzzer input. */
struct two_inputs {
	std::unique_ptr<char[]> right;
	size_t right_size;
	std::unique_ptr<char[]> left;
	size_t left_size;
};

/* Split a non-zero-length input into two inputs. */
contrib::optional<two_inputs> SplitInputs(const uint8_t *Data, size_t Size);

#endif /* CONTRIB_FUZZ_FUZZUTIL_H */
@@ -0,0 +1,122 b''
1 /*
2 * mpatch.cc - fuzzer harness for mpatch.c
3 *
4 * Copyright 2018, Google Inc.
5 *
6 * This software may be used and distributed according to the terms of
7 * the GNU General Public License, incorporated herein by reference.
8 */
9 #include <iostream>
10 #include <memory>
11 #include <stdint.h>
12 #include <stdlib.h>
13 #include <vector>
14
15 #include "fuzzutil.h"
16
17 // To avoid having too many OOMs from the fuzzer infrastructure, we'll
18 // skip patch application if the resulting fulltext would be bigger
19 // than 10MiB.
20 #define MAX_OUTPUT_SIZE 10485760
21
22 extern "C" {
23 #include "bitmanipulation.h"
24 #include "mpatch.h"
25
// One text for the fuzzer (the base or one binary delta), held as an
// owned byte buffer plus its length; no NUL terminator is appended.
struct mpatchbin {
	std::unique_ptr<char[]> data;
	size_t len;
};
30
31 static mpatch_flist *getitem(void *vbins, ssize_t pos)
32 {
33 std::vector<mpatchbin> *bins = (std::vector<mpatchbin> *)vbins;
34 const mpatchbin &bin = bins->at(pos + 1);
35 struct mpatch_flist *res;
36 LOG(2) << "mpatch_decode " << bin.len << std::endl;
37 if (mpatch_decode(bin.data.get(), bin.len, &res) < 0)
38 return NULL;
39 return res;
40 }
41
42 // input format:
43 // u8 number of inputs
44 // one u16 for each input, its length
45 // the inputs
46 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
47 {
48 if (!Size) {
49 return 0;
50 }
51 // First byte of data is how many texts we expect, first text
52 // being the base the rest being the deltas.
53 ssize_t numtexts = Data[0];
54 if (numtexts < 2) {
55 // No point if we don't have at least a base text and a delta...
56 return 0;
57 }
58 // Each text will be described by a byte for how long it
59 // should be, so give up if we don't have enough.
60 if ((Size - 1) < (numtexts * 2)) {
61 return 0;
62 }
63 size_t consumed = 1 + (numtexts * 2);
64 LOG(2) << "input contains " << Size << std::endl;
65 LOG(2) << numtexts << " texts, consuming " << consumed << std::endl;
66 std::vector<mpatchbin> bins;
67 bins.reserve(numtexts);
68 for (int i = 0; i < numtexts; ++i) {
69 mpatchbin bin;
70 size_t nthsize = getbeuint16((char *)Data + 1 + (2 * i));
71 LOG(2) << "text " << i << " is " << nthsize << std::endl;
72 char *start = (char *)Data + consumed;
73 consumed += nthsize;
74 if (consumed > Size) {
75 LOG(2) << "ran out of data, consumed " << consumed
76 << " of " << Size << std::endl;
77 return 0;
78 }
79 bin.len = nthsize;
80 bin.data.reset(new char[nthsize]);
81 memcpy(bin.data.get(), start, nthsize);
82 bins.push_back(std::move(bin));
83 }
84 LOG(2) << "mpatch_flist" << std::endl;
85 struct mpatch_flist *patch =
86 mpatch_fold(&bins, getitem, 0, numtexts - 1);
87 if (!patch) {
88 return 0;
89 }
90 LOG(2) << "mpatch_calcsize" << std::endl;
91 ssize_t outlen = mpatch_calcsize(bins[0].len, patch);
92 LOG(2) << "outlen " << outlen << std::endl;
93 if (outlen < 0 || outlen > MAX_OUTPUT_SIZE) {
94 goto cleanup;
95 }
96 {
97 char *dest = (char *)malloc(outlen);
98 LOG(2) << "expecting " << outlen << " total bytes at "
99 << (void *)dest << std::endl;
100 mpatch_apply(dest, bins[0].data.get(), bins[0].len, patch);
101 free(dest);
102 LOG(1) << "applied a complete patch" << std::endl;
103 }
104 cleanup:
105 mpatch_lfree(patch);
106 return 0;
107 }
108
#ifdef HG_FUZZER_INCLUDE_MAIN
int main(int argc, char **argv)
{
	// One text, one patch.
	// Header: count=2, base length=0x0001, delta length=0x000d.
	// BUG FIX: the header previously read "\x02\x00\0x1\x00\x0d" —
	// "\0x1" is a NUL byte followed by the literal characters "x1",
	// not the escape "\x01", so the buffer was 21 bytes and disagreed
	// with both the advertised length of 19 and the documented format.
	const char data[] = "\x02\x00\x01\x00\x0d"
	                    // base text
	                    "a"
	                    // binary delta that will append a single b
	                    "\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01b";
	return LLVMFuzzerTestOneInput((const uint8_t *)data, 19);
}
#endif
121
122 } // extern "C"
@@ -0,0 +1,345 b''
1 from __future__ import absolute_import, print_function
2
3 import argparse
4 import struct
5 import zipfile
6
7 from mercurial import (
8 hg,
9 ui as uimod,
10 )
11
12 ap = argparse.ArgumentParser()
13 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
14 args = ap.parse_args()
15
class deltafrag(object):
    """One hunk of a binary delta: replace base[start:end] with data."""

    def __init__(self, start, end, data):
        self.start = start
        self.end = end
        self.data = data

    def __str__(self):
        # Wire format: three big-endian 32-bit ints (start, end,
        # payload length) followed by the payload itself.
        header = struct.pack(">lll", self.start, self.end, len(self.data))
        return header + self.data
25
class delta(object):
    """An ordered collection of deltafrags forming one complete delta."""

    def __init__(self, frags):
        self.frags = frags

    def __str__(self):
        # A delta is just its fragments' serializations, concatenated.
        return ''.join(map(str, self.frags))
32
class corpus(object):
    """A fuzzer seed: a base text plus deltas, in the harness wire
    format (u8 text count, u16 length per text, then the texts)."""

    def __init__(self, base, deltas):
        self.base = base
        self.deltas = deltas

    def __str__(self):
        deltas = [str(d) for d in self.deltas]
        # Header: one u8 for the total text count (base included),
        # then one big-endian u16 length per text, base first.
        parts = [struct.pack(">B", len(deltas) + 1)]
        parts.append(struct.pack(">H", len(self.base)))
        for d in deltas:
            parts.append(struct.pack(">H", len(d)))
        # Body: the base text followed by each serialized delta.
        parts.append(self.base)
        parts.extend(deltas)
        return "".join(parts)
51
52 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
53 # Manually constructed entries
54 zf.writestr(
55 "one_delta_applies",
56 str(corpus('a', [delta([deltafrag(0, 1, 'b')])]))
57 )
58 zf.writestr(
59 "one_delta_starts_late",
60 str(corpus('a', [delta([deltafrag(3, 1, 'b')])]))
61 )
62 zf.writestr(
63 "one_delta_ends_late",
64 str(corpus('a', [delta([deltafrag(0, 20, 'b')])]))
65 )
66
67 try:
68 # Generated from repo data
69 r = hg.repository(uimod.ui(), '../..')
70 fl = r.file('mercurial/manifest.py')
71 rl = getattr(fl, '_revlog', fl)
72 bins = rl._chunks(rl._deltachain(10)[0])
73 zf.writestr('manifest_py_rev_10',
74 str(corpus(bins[0], bins[1:])))
75 except: # skip this, so no re-raises
76 print('skipping seed file from repo data')
77 # Automatically discovered by running the fuzzer
78 zf.writestr(
79 "mpatch_decode_old_overread", "\x02\x00\x00\x00\x02\x00\x00\x00"
80 )
81 # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876
82 zf.writestr(
83 "mpatch_ossfuzz_getbe32_ubsan",
84 "\x02\x00\x00\x00\x0c \xff\xff\xff\xff ")
85 zf.writestr(
86 "mpatch_apply_over_memcpy",
87 '\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00'
88 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
89 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00'
90 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
91 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
92 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
93 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
94 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
95 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
96 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
97 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
98 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
99 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
100 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
101 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
102 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
103 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
104 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
105 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
106 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
107 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
108 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
109 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
110 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
111 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
112 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
113 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
114 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
115 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
116 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
117 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8c\x00\x00\x00\x00'
118 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
119 '\x00\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00\x00\x00\x00\x00\x00'
120 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
121 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
122 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
123 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
124 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
125 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
126 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
127 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
128 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
129 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
130 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
131 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
132 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
133 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
134 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
135 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
136 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
137 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
138 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
139 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
140 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
141 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
142 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
143 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
144 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
145 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
146 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
147 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
148 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
149 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
150 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
151 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
152 '\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00A\x00\x00\x00\x00'
153 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
154 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
155 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
156 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
157 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
158 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
159 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
160 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
161 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
162 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
163 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
164 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
165 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
166 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
167 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
168 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
169 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
170 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
171 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
172 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
173 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
174 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
175 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
176 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
177 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x18'
178 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
179 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
180 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
181 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
182 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
183 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
184 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
185 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
186 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
187 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
188 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
189 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
190 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
191 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
192 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
193 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
194 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
195 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
196 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
197 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
198 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
199 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
200 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
201 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
202 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
203 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
204 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
205 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
206 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
207 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
208 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
209 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
210 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
211 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
212 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
213 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
214 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
215 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
216 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
217 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
218 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
219 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
220 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
221 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
222 '\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
223 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
224 '\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00\x00\x00\x00\x00'
225 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
226 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
227 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
228 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
229 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
230 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
231 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
232 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
233 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
234 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
235 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
236 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
237 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
238 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
239 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
240 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
241 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
242 '\x00\x00\x94\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
243 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
244 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
245 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
246 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
247 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
248 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
249 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
250 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
251 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
252 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
253 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
254 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
255 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
256 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
257 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
258 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
259 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
260 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
261 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
262 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
263 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
264 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
265 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
266 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
267 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
268 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
269 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
270 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
271 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
272 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
273 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
274 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
275 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
276 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
277 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
278 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
279 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
280 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
281 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
282 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
283 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
284 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
285 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
286 '\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
287 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
288 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00'
289 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
290 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
291 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
292 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
293 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
294 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
295 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
296 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
297 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
298 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
299 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
300 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
301 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
302 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
303 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
304 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
305 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
306 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
307 '\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00'
308 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
309 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
310 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
311 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
312 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
313 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
314 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
315 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
316 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
317 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
318 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
319 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
320 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
321 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
322 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00'
323 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
324 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
325 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
326 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
327 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\x00\x00'
328 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
329 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
330 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
331 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
332 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
333 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
334 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
335 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
336 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
337 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
338 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
339 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
340 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
341 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
342 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
343 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00'
344 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
345 '\x00\x00\x00\x00')
@@ -0,0 +1,15 b''
FROM fedora:28

# Unprivileged user for running the build (uid/gid 1000, home /build).
RUN groupadd -g 1000 build && \
    useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build

# Toolchain and packaging dependencies for building the RPMs.
RUN dnf install -y \
  gcc \
  gettext \
  make \
  python-devel \
  python-docutils \
  rpm-build

# For creating repo meta data
RUN dnf install -y createrepo
@@ -0,0 +1,111 b''
1 #!/usr/bin/env python3
2 #
3 # Copyright 2018 Gregory Szorc <gregory.szorc@gmail.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 import argparse
9 import pathlib
10 import shutil
11 import subprocess
12 import sys
13
def get_docker() -> str:
    """Locate a usable Docker CLI binary and return its path.

    Exits the process with status 1 if no genuine Docker executable can
    be found (including the case where ``docker`` resolves to the
    unrelated "Docking System Tray" binary shipped by some distros).
    """
    docker = shutil.which('docker.io') or shutil.which('docker')
    if not docker:
        print('could not find docker executable')
        # BUG FIX: was `return 1`, which violated the -> str annotation
        # and handed the int 1 to callers as if it were a path; every
        # other failure branch in this function uses sys.exit(1).
        sys.exit(1)

    try:
        out = subprocess.check_output([docker, '-h'], stderr=subprocess.STDOUT)

        if b'Jansens' in out:
            print('%s is the Docking System Tray; try installing docker.io' %
                  docker)
            sys.exit(1)
    except subprocess.CalledProcessError as e:
        print('error calling `%s -h`: %s' % (docker, e.output))
        sys.exit(1)

    out = subprocess.check_output([docker, 'version'],
                                  stderr=subprocess.STDOUT)

    # `docker version` output starts with Client:/Server: sections
    # (older clients print "Client version:"/"Server version:").
    lines = out.splitlines()
    if not any(l.startswith((b'Client:', b'Client version:')) for l in lines):
        print('`%s version` does not look like Docker' % docker)
        sys.exit(1)

    if not any(l.startswith((b'Server:', b'Server version:')) for l in lines):
        print('`%s version` does not look like Docker' % docker)
        sys.exit(1)

    return docker
44
def get_dockerfile(path: pathlib.Path, args: list) -> bytes:
    """Read a Dockerfile template and substitute %key% placeholders.

    ``args`` is a sequence of (key, value) byte-string pairs; every
    occurrence of ``%key%`` in the file body is replaced with ``value``.
    """
    df = path.read_bytes()
    for key, value in args:
        df = df.replace(b'%%%s%%' % key, value)
    return df
53
def build_docker_image(dockerfile: pathlib.Path, params: list, tag: str):
    """Build a Docker image from a templatized Dockerfile."""
    docker = get_docker()

    # Distinct names for the template path and the rendered bytes
    # (the original reused `dockerfile` for both).
    template_path = pathlib.Path(dockerfile)
    rendered = get_dockerfile(template_path, params)

    print('building Dockerfile:')
    print(rendered.decode('utf-8', 'replace'))

    build_cmd = [
        docker,
        'build',
        '--build-arg', 'http_proxy',
        '--build-arg', 'https_proxy',
        '--tag', tag,
        '-',
    ]

    print('executing: %r' % build_cmd)
    # Feed the rendered Dockerfile on stdin (the `-` argument above).
    subprocess.run(build_cmd, input=rendered, check=True)
76
def command_build(args):
    """Handle the `build` subcommand: parse key=value build args and
    build the image from the given Dockerfile template."""
    build_args = [
        (key.encode('utf-8'), value.encode('utf-8'))
        for key, value in (arg.split('=', 1) for arg in args.build_arg)
    ]

    build_docker_image(pathlib.Path(args.dockerfile), build_args, args.tag)
86
def command_docker(args):
    """Handle the `docker-path` subcommand: print the resolved Docker path."""
    print(get_docker())
89
def main() -> int:
    """Parse command-line arguments and dispatch to the chosen subcommand.

    Returns the subcommand handler's return value (used as the exit code).
    """
    parser = argparse.ArgumentParser()

    subparsers = parser.add_subparsers(title='subcommands')

    build = subparsers.add_parser('build', help='Build a Docker image')
    build.set_defaults(func=command_build)
    build.add_argument('--build-arg', action='append', default=[],
                       help='Substitution to perform in Dockerfile; '
                            'format: key=value')
    build.add_argument('dockerfile', help='path to Dockerfile to use')
    build.add_argument('tag', help='Tag to apply to created image')

    docker = subparsers.add_parser('docker-path', help='Resolve path to Docker')
    docker.set_defaults(func=command_docker)

    args = parser.parse_args()

    # BUG FIX: subparsers are optional in Python 3's argparse, so with
    # no subcommand `args` had no `func` attribute and the original
    # crashed with AttributeError; show usage instead.
    if not hasattr(args, 'func'):
        parser.print_help()
        return 1

    return args.func(args)
109
110 if __name__ == '__main__':
111 sys.exit(main())
@@ -0,0 +1,93 b''
1 # -*- coding: UTF-8 -*-
2 # beautifygraph.py - improve graph output by using Unicode characters
3 #
4 # Copyright 2018 John Stiles <johnstiles@gmail.com>
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 '''beautify log -G output by using Unicode characters (EXPERIMENTAL)
10
11 A terminal with UTF-8 support and monospace narrow text are required.
12 '''
13
14 from __future__ import absolute_import
15
16 from mercurial.i18n import _
17 from mercurial import (
18 encoding,
19 extensions,
20 graphmod,
21 templatekw,
22 )
23
24 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
25 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
26 # be specifying the version(s) of Mercurial they are tested with, or
27 # leave the attribute unspecified.
28 testedwith = 'ships-with-hg-core'
29
30 def prettyedge(before, edge, after):
31 if edge == '~':
32 return '\xE2\x95\xA7' # U+2567 ╧
33 if edge == 'X':
34 return '\xE2\x95\xB3' # U+2573 ╳
35 if edge == '/':
36 return '\xE2\x95\xB1' # U+2571 ╱
37 if edge == '-':
38 return '\xE2\x94\x80' # U+2500 ─
39 if edge == '|':
40 return '\xE2\x94\x82' # U+2502 │
41 if edge == ':':
42 return '\xE2\x94\x86' # U+2506 ┆
43 if edge == '\\':
44 return '\xE2\x95\xB2' # U+2572 ╲
45 if edge == '+':
46 if before == ' ' and not after == ' ':
47 return '\xE2\x94\x9C' # U+251C ├
48 if after == ' ' and not before == ' ':
49 return '\xE2\x94\xA4' # U+2524 ┤
50 return '\xE2\x94\xBC' # U+253C ┼
51 return edge
52
53 def convertedges(line):
54 line = ' %s ' % line
55 pretty = []
56 for idx in xrange(len(line) - 2):
57 pretty.append(prettyedge(line[idx], line[idx + 1], line[idx + 2]))
58 return ''.join(pretty)
59
60 def getprettygraphnode(orig, *args, **kwargs):
61 node = orig(*args, **kwargs)
62 if node == 'o':
63 return '\xE2\x97\x8B' # U+25CB ○
64 if node == '@':
65 return '\xE2\x97\x8D' # U+25CD ◍
66 if node == '*':
67 return '\xE2\x88\x97' # U+2217 ∗
68 if node == 'x':
69 return '\xE2\x97\x8C' # U+25CC ◌
70 if node == '_':
71 return '\xE2\x95\xA4' # U+2564 ╤
72 return node
73
74 def outputprettygraph(orig, ui, graph, *args, **kwargs):
75 (edges, text) = zip(*graph)
76 graph = zip([convertedges(e) for e in edges], text)
77 return orig(ui, graph, *args, **kwargs)
78
79 def extsetup(ui):
80 if encoding.encoding != 'UTF-8':
81 ui.warn(_('beautifygraph: unsupported encoding, UTF-8 required\n'))
82 return
83
84 if 'A' in encoding._wide:
85 ui.warn(_('beautifygraph: unsupported terminal settings, '
86 'monospace narrow text required\n'))
87 return
88
89 if ui.plain('graph'):
90 return
91
92 extensions.wrapfunction(graphmod, 'outputgraph', outputprettygraph)
93 extensions.wrapfunction(templatekw, 'getgraphnode', getprettygraphnode)
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
@@ -11,3 +11,8 b' trim_trailing_whitespace = true'
11 indent_size = 8
11 indent_size = 8
12 indent_style = tab
12 indent_style = tab
13 trim_trailing_whitespace = true
13 trim_trailing_whitespace = true
14
15 [*.t]
16 indent_size = 2
17 indent_style = space
18 trim_trailing_whitespace = false
@@ -31,8 +31,6 b' build'
31 contrib/chg/chg
31 contrib/chg/chg
32 contrib/hgsh/hgsh
32 contrib/hgsh/hgsh
33 contrib/vagrant/.vagrant
33 contrib/vagrant/.vagrant
34 contrib/docker/debian-*
35 contrib/docker/ubuntu-*
36 dist
34 dist
37 packages
35 packages
38 doc/common.txt
36 doc/common.txt
@@ -164,6 +164,39 b' i18n/hg.pot: $(PYFILES) $(DOCFILES) i18n'
164
164
165 # Packaging targets
165 # Packaging targets
166
166
167 packaging_targets := \
168 centos5 \
169 centos6 \
170 centos7 \
171 deb \
172 docker-centos5 \
173 docker-centos6 \
174 docker-centos7 \
175 docker-debian-jessie \
176 docker-debian-stretch \
177 docker-fedora20 \
178 docker-fedora21 \
179 docker-fedora28 \
180 docker-ubuntu-trusty \
181 docker-ubuntu-trusty-ppa \
182 docker-ubuntu-xenial \
183 docker-ubuntu-xenial-ppa \
184 docker-ubuntu-artful \
185 docker-ubuntu-artful-ppa \
186 docker-ubuntu-bionic \
187 docker-ubuntu-bionic-ppa \
188 fedora20 \
189 fedora21 \
190 fedora28 \
191 linux-wheels \
192 linux-wheels-x86_64 \
193 linux-wheels-i686 \
194 ppa
195
196 # Forward packaging targets for convenience.
197 $(packaging_targets):
198 $(MAKE) -C contrib/packaging $@
199
167 osx:
200 osx:
168 rm -rf build/mercurial
201 rm -rf build/mercurial
169 /usr/bin/python2.7 setup.py install --optimize=1 \
202 /usr/bin/python2.7 setup.py install --optimize=1 \
@@ -197,127 +230,14 b' osx:'
197 --identifier org.mercurial-scm.mercurial \
230 --identifier org.mercurial-scm.mercurial \
198 --version "$${HGVER}" \
231 --version "$${HGVER}" \
199 build/mercurial.pkg && \
232 build/mercurial.pkg && \
200 productbuild --distribution contrib/macosx/distribution.xml \
233 productbuild --distribution contrib/packaging/macosx/distribution.xml \
201 --package-path build/ \
234 --package-path build/ \
202 --version "$${HGVER}" \
235 --version "$${HGVER}" \
203 --resources contrib/macosx/ \
236 --resources contrib/packaging/macosx/ \
204 "$${OUTPUTDIR:-dist/}"/Mercurial-"$${HGVER}"-macosx"$${OSXVER}".pkg
237 "$${OUTPUTDIR:-dist/}"/Mercurial-"$${HGVER}"-macosx"$${OSXVER}".pkg
205
238
206 deb:
207 contrib/builddeb
208
209 ppa:
210 contrib/builddeb --source-only
211
212 contrib/docker/debian-%: contrib/docker/debian.template
213 sed "s/__CODENAME__/$*/" $< > $@
214
215 docker-debian-jessie: contrib/docker/debian-jessie
216 contrib/dockerdeb debian jessie
217
218 docker-debian-stretch: contrib/docker/debian-stretch
219 contrib/dockerdeb debian stretch
220
221 contrib/docker/ubuntu-%: contrib/docker/ubuntu.template
222 sed "s/__CODENAME__/$*/" $< > $@
223
224 docker-ubuntu-trusty: contrib/docker/ubuntu-trusty
225 contrib/dockerdeb ubuntu trusty
226
227 docker-ubuntu-trusty-ppa: contrib/docker/ubuntu-trusty
228 contrib/dockerdeb ubuntu trusty --source-only
229
230 docker-ubuntu-xenial: contrib/docker/ubuntu-xenial
231 contrib/dockerdeb ubuntu xenial
232
233 docker-ubuntu-xenial-ppa: contrib/docker/ubuntu-xenial
234 contrib/dockerdeb ubuntu xenial --source-only
235
236 docker-ubuntu-artful: contrib/docker/ubuntu-artful
237 contrib/dockerdeb ubuntu artful
238
239 docker-ubuntu-artful-ppa: contrib/docker/ubuntu-artful
240 contrib/dockerdeb ubuntu artful --source-only
241
242 docker-ubuntu-bionic: contrib/docker/ubuntu-bionic
243 contrib/dockerdeb ubuntu bionic
244
245 docker-ubuntu-bionic-ppa: contrib/docker/ubuntu-bionic
246 contrib/dockerdeb ubuntu bionic --source-only
247
248 fedora20:
249 mkdir -p packages/fedora20
250 contrib/buildrpm
251 cp rpmbuild/RPMS/*/* packages/fedora20
252 cp rpmbuild/SRPMS/* packages/fedora20
253 rm -rf rpmbuild
254
255 docker-fedora20:
256 mkdir -p packages/fedora20
257 contrib/dockerrpm fedora20
258
259 fedora21:
260 mkdir -p packages/fedora21
261 contrib/buildrpm
262 cp rpmbuild/RPMS/*/* packages/fedora21
263 cp rpmbuild/SRPMS/* packages/fedora21
264 rm -rf rpmbuild
265
266 docker-fedora21:
267 mkdir -p packages/fedora21
268 contrib/dockerrpm fedora21
269
270 centos5:
271 mkdir -p packages/centos5
272 contrib/buildrpm --withpython
273 cp rpmbuild/RPMS/*/* packages/centos5
274 cp rpmbuild/SRPMS/* packages/centos5
275
276 docker-centos5:
277 mkdir -p packages/centos5
278 contrib/dockerrpm centos5 --withpython
279
280 centos6:
281 mkdir -p packages/centos6
282 contrib/buildrpm --withpython
283 cp rpmbuild/RPMS/*/* packages/centos6
284 cp rpmbuild/SRPMS/* packages/centos6
285
286 docker-centos6:
287 mkdir -p packages/centos6
288 contrib/dockerrpm centos6 --withpython
289
290 centos7:
291 mkdir -p packages/centos7
292 contrib/buildrpm
293 cp rpmbuild/RPMS/*/* packages/centos7
294 cp rpmbuild/SRPMS/* packages/centos7
295
296 docker-centos7:
297 mkdir -p packages/centos7
298 contrib/dockerrpm centos7
299
300 linux-wheels: linux-wheels-x86_64 linux-wheels-i686
301
302 linux-wheels-x86_64:
303 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`:/src quay.io/pypa/manylinux1_x86_64 /src/contrib/build-linux-wheels.sh
304
305 linux-wheels-i686:
306 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`:/src quay.io/pypa/manylinux1_i686 linux32 /src/contrib/build-linux-wheels.sh
307
308 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
239 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
309 install-doc install-home install-home-bin install-home-doc \
240 install-doc install-home install-home-bin install-home-doc \
310 dist dist-notests check tests check-code format-c update-pot \
241 dist dist-notests check tests check-code format-c update-pot \
311 osx deb ppa \
242 $(packaging_targets) \
312 docker-debian-jessie \
243 osx
313 docker-debian-stretch \
314 docker-ubuntu-trusty docker-ubuntu-trusty-ppa \
315 docker-ubuntu-xenial docker-ubuntu-xenial-ppa \
316 docker-ubuntu-artful docker-ubuntu-artful-ppa \
317 docker-ubuntu-bionic docker-ubuntu-bionic-ppa \
318 fedora20 docker-fedora20 \
319 fedora21 docker-fedora21 \
320 centos5 docker-centos5 \
321 centos6 docker-centos6 \
322 centos7 docker-centos7 \
323 linux-wheels
@@ -135,3 +135,7 b' head() and author("mpm")'
135 # testing the mutable phases set
135 # testing the mutable phases set
136 draft()
136 draft()
137 secret()
137 secret()
138
139 # test finding common ancestors
140 heads(commonancestors(last(head(), 2)))
141 heads(commonancestors(head()))
@@ -46,3 +46,4 b' parents(20000)'
46 (20000::) - (20000)
46 (20000::) - (20000)
47 # The one below is used by rebase
47 # The one below is used by rebase
48 (children(ancestor(tip~5, tip)) and ::(tip~5))::
48 (children(ancestor(tip~5, tip)) and ::(tip~5))::
49 heads(commonancestors(last(head(), 2)))
@@ -1,4 +1,6 b''
1 # __init__.py - Startup and module loading logic for Mercurial.
1 #!/usr/bin/env python3
2 #
3 # byteify-strings.py - transform string literals to be Python 3 safe
2 #
4 #
3 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
5 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
4 #
6 #
@@ -7,293 +9,217 b''
7
9
8 from __future__ import absolute_import
10 from __future__ import absolute_import
9
11
12 import argparse
13 import contextlib
14 import errno
15 import os
10 import sys
16 import sys
11
17 import tempfile
12 # Allow 'from mercurial import demandimport' to keep working.
18 import token
13 import hgdemandimport
19 import tokenize
14 demandimport = hgdemandimport
15
16 __all__ = []
17
18 # Python 3 uses a custom module loader that transforms source code between
19 # source file reading and compilation. This is done by registering a custom
20 # finder that changes the spec for Mercurial modules to use a custom loader.
21 if sys.version_info[0] >= 3:
22 import importlib
23 import importlib.abc
24 import io
25 import token
26 import tokenize
27
20
28 class hgpathentryfinder(importlib.abc.MetaPathFinder):
21 def adjusttokenpos(t, ofs):
29 """A sys.meta_path finder that uses a custom module loader."""
22 """Adjust start/end column of the given token"""
30 def find_spec(self, fullname, path, target=None):
23 return t._replace(start=(t.start[0], t.start[1] + ofs),
31 # Only handle Mercurial-related modules.
24 end=(t.end[0], t.end[1] + ofs))
32 if not fullname.startswith(('mercurial.', 'hgext.', 'hgext3rd.')):
25
33 return None
26 def replacetokens(tokens, opts):
34 # don't try to parse binary
27 """Transform a stream of tokens from raw to Python 3.
35 if fullname.startswith('mercurial.cext.'):
28
36 return None
29 Returns a generator of possibly rewritten tokens.
37 # third-party packages are expected to be dual-version clean
38 if fullname.startswith('mercurial.thirdparty'):
39 return None
40 # zstd is already dual-version clean, don't try and mangle it
41 if fullname.startswith('mercurial.zstd'):
42 return None
43 # pywatchman is already dual-version clean, don't try and mangle it
44 if fullname.startswith('hgext.fsmonitor.pywatchman'):
45 return None
46
30
47 # Try to find the module using other registered finders.
31 The input token list may be mutated as part of processing. However,
48 spec = None
32 its changes do not necessarily match the output token stream.
49 for finder in sys.meta_path:
33 """
50 if finder == self:
34 sysstrtokens = set()
51 continue
52
53 spec = finder.find_spec(fullname, path, target=target)
54 if spec:
55 break
56
57 # This is a Mercurial-related module but we couldn't find it
58 # using the previously-registered finders. This likely means
59 # the module doesn't exist.
60 if not spec:
61 return None
62
35
63 # TODO need to support loaders from alternate specs, like zip
36 # The following utility functions access the tokens list and i index of
64 # loaders.
37 # the for i, t enumerate(tokens) loop below
65 loader = hgloader(spec.name, spec.origin)
38 def _isop(j, *o):
66 # Can't use util.safehasattr here because that would require
39 """Assert that tokens[j] is an OP with one of the given values"""
67 # importing util, and we're in import code.
40 try:
68 if hasattr(spec.loader, 'loader'): # hasattr-py3-only
41 return tokens[j].type == token.OP and tokens[j].string in o
69 # This is a nested loader (maybe a lazy loader?)
42 except IndexError:
70 spec.loader.loader = loader
43 return False
71 else:
72 spec.loader = loader
73 return spec
74
44
75 def replacetokens(tokens, fullname):
45 def _findargnofcall(n):
76 """Transform a stream of tokens from raw to Python 3.
46 """Find arg n of a call expression (start at 0)
77
78 It is called by the custom module loading machinery to rewrite
79 source/tokens between source decoding and compilation.
80
47
81 Returns a generator of possibly rewritten tokens.
48 Returns index of the first token of that argument, or None if
49 there is not that many arguments.
82
50
83 The input token list may be mutated as part of processing. However,
51 Assumes that token[i + 1] is '('.
84 its changes do not necessarily match the output token stream.
85
52
86 REMEMBER TO CHANGE ``BYTECODEHEADER`` WHEN CHANGING THIS FUNCTION
87 OR CACHED FILES WON'T GET INVALIDATED PROPERLY.
88 """
53 """
89 futureimpline = False
54 nested = 0
90
55 for j in range(i + 2, len(tokens)):
91 # The following utility functions access the tokens list and i index of
56 if _isop(j, ')', ']', '}'):
92 # the for i, t enumerate(tokens) loop below
57 # end of call, tuple, subscription or dict / set
93 def _isop(j, *o):
58 nested -= 1
94 """Assert that tokens[j] is an OP with one of the given values"""
59 if nested < 0:
95 try:
60 return None
96 return tokens[j].type == token.OP and tokens[j].string in o
61 elif n == 0:
97 except IndexError:
62 # this is the starting position of arg
98 return False
63 return j
99
64 elif _isop(j, '(', '[', '{'):
100 def _findargnofcall(n):
65 nested += 1
101 """Find arg n of a call expression (start at 0)
66 elif _isop(j, ',') and nested == 0:
102
67 n -= 1
103 Returns index of the first token of that argument, or None if
104 there is not that many arguments.
105
106 Assumes that token[i + 1] is '('.
107
68
108 """
69 return None
109 nested = 0
70
110 for j in range(i + 2, len(tokens)):
71 def _ensuresysstr(j):
111 if _isop(j, ')', ']', '}'):
72 """Make sure the token at j is a system string
112 # end of call, tuple, subscription or dict / set
73
113 nested -= 1
74 Remember the given token so the string transformer won't add
114 if nested < 0:
75 the byte prefix.
115 return None
116 elif n == 0:
117 # this is the starting position of arg
118 return j
119 elif _isop(j, '(', '[', '{'):
120 nested += 1
121 elif _isop(j, ',') and nested == 0:
122 n -= 1
123
76
124 return None
77 Ignores tokens that are not strings. Assumes bounds checking has
78 already been done.
125
79
126 def _ensureunicode(j):
80 """
127 """Make sure the token at j is a unicode string
81 st = tokens[j]
82 if st.type == token.STRING and st.string.startswith(("'", '"')):
83 sysstrtokens.add(st)
128
84
129 This rewrites a string token to include the unicode literal prefix
85 coldelta = 0 # column increment for new opening parens
130 so the string transformer won't add the byte prefix.
86 coloffset = -1 # column offset for the current line (-1: TBD)
131
87 parens = [(0, 0, 0)] # stack of (line, end-column, column-offset)
132 Ignores tokens that are not strings. Assumes bounds checking has
88 for i, t in enumerate(tokens):
133 already been done.
89 # Compute the column offset for the current line, such that
90 # the current line will be aligned to the last opening paren
91 # as before.
92 if coloffset < 0:
93 if t.start[1] == parens[-1][1]:
94 coloffset = parens[-1][2]
95 elif t.start[1] + 1 == parens[-1][1]:
96 # fix misaligned indent of s/util.Abort/error.Abort/
97 coloffset = parens[-1][2] + (parens[-1][1] - t.start[1])
98 else:
99 coloffset = 0
134
100
135 """
101 # Reset per-line attributes at EOL.
136 st = tokens[j]
102 if t.type in (token.NEWLINE, tokenize.NL):
137 if st.type == token.STRING and st.string.startswith(("'", '"')):
103 yield adjusttokenpos(t, coloffset)
138 tokens[j] = st._replace(string='u%s' % st.string)
104 coldelta = 0
139
105 coloffset = -1
140 for i, t in enumerate(tokens):
106 continue
141 # Convert most string literals to byte literals. String literals
142 # in Python 2 are bytes. String literals in Python 3 are unicode.
143 # Most strings in Mercurial are bytes and unicode strings are rare.
144 # Rather than rewrite all string literals to use ``b''`` to indicate
145 # byte strings, we apply this token transformer to insert the ``b``
146 # prefix nearly everywhere.
147 if t.type == token.STRING:
148 s = t.string
149
107
150 # Preserve docstrings as string literals. This is inconsistent
108 # Remember the last paren position.
151 # with regular unprefixed strings. However, the
109 if _isop(i, '(', '[', '{'):
152 # "from __future__" parsing (which allows a module docstring to
110 parens.append(t.end + (coloffset + coldelta,))
153 # exist before it) doesn't properly handle the docstring if it
111 elif _isop(i, ')', ']', '}'):
154 # is b''' prefixed, leading to a SyntaxError. We leave all
112 parens.pop()
155 # docstrings as unprefixed to avoid this. This means Mercurial
156 # components touching docstrings need to handle unicode,
157 # unfortunately.
158 if s[0:3] in ("'''", '"""'):
159 yield t
160 continue
161
113
162 # If the first character isn't a quote, it is likely a string
114 # Convert most string literals to byte literals. String literals
163 # prefixing character (such as 'b', 'u', or 'r'. Ignore.
115 # in Python 2 are bytes. String literals in Python 3 are unicode.
164 if s[0] not in ("'", '"'):
116 # Most strings in Mercurial are bytes and unicode strings are rare.
165 yield t
117 # Rather than rewrite all string literals to use ``b''`` to indicate
166 continue
118 # byte strings, we apply this token transformer to insert the ``b``
119 # prefix nearly everywhere.
120 if t.type == token.STRING and t not in sysstrtokens:
121 s = t.string
167
122
168 # String literal. Prefix to make a b'' string.
123 # Preserve docstrings as string literals. This is inconsistent
169 yield t._replace(string='b%s' % t.string)
124 # with regular unprefixed strings. However, the
125 # "from __future__" parsing (which allows a module docstring to
126 # exist before it) doesn't properly handle the docstring if it
127 # is b''' prefixed, leading to a SyntaxError. We leave all
128 # docstrings as unprefixed to avoid this. This means Mercurial
129 # components touching docstrings need to handle unicode,
130 # unfortunately.
131 if s[0:3] in ("'''", '"""'):
132 yield adjusttokenpos(t, coloffset)
170 continue
133 continue
171
134
172 # Insert compatibility imports at "from __future__ import" line.
135 # If the first character isn't a quote, it is likely a string
173 # No '\n' should be added to preserve line numbers.
136 # prefixing character (such as 'b', 'u', or 'r'. Ignore.
174 if (t.type == token.NAME and t.string == 'import' and
137 if s[0] not in ("'", '"'):
175 all(u.type == token.NAME for u in tokens[i - 2:i]) and
138 yield adjusttokenpos(t, coloffset)
176 [u.string for u in tokens[i - 2:i]] == ['from', '__future__']):
139 continue
177 futureimpline = True
140
178 if t.type == token.NEWLINE and futureimpline:
141 # String literal. Prefix to make a b'' string.
179 futureimpline = False
142 yield adjusttokenpos(t._replace(string='b%s' % t.string),
180 if fullname == 'mercurial.pycompat':
143 coloffset)
181 yield t
144 coldelta += 1
182 continue
145 continue
183 r, c = t.start
146
184 l = (b'; from mercurial.pycompat import '
147 # This looks like a function call.
185 b'delattr, getattr, hasattr, setattr, xrange, '
148 if t.type == token.NAME and _isop(i + 1, '('):
186 b'open, unicode\n')
149 fn = t.string
187 for u in tokenize.tokenize(io.BytesIO(l).readline):
150
188 if u.type in (tokenize.ENCODING, token.ENDMARKER):
151 # *attr() builtins don't accept byte strings to 2nd argument.
189 continue
152 if (fn in ('getattr', 'setattr', 'hasattr', 'safehasattr') and
190 yield u._replace(
153 not _isop(i - 1, '.')):
191 start=(r, c + u.start[1]), end=(r, c + u.end[1]))
154 arg1idx = _findargnofcall(1)
155 if arg1idx is not None:
156 _ensuresysstr(arg1idx)
157
158 # .encode() and .decode() on str/bytes/unicode don't accept
159 # byte strings on Python 3.
160 elif fn in ('encode', 'decode') and _isop(i - 1, '.'):
161 for argn in range(2):
162 argidx = _findargnofcall(argn)
163 if argidx is not None:
164 _ensuresysstr(argidx)
165
166 # It changes iteritems/values to items/values as they are not
167 # present in Python 3 world.
168 elif opts['dictiter'] and fn in ('iteritems', 'itervalues'):
169 yield adjusttokenpos(t._replace(string=fn[4:]), coloffset)
192 continue
170 continue
193
171
194 # This looks like a function call.
172 # Emit unmodified token.
195 if t.type == token.NAME and _isop(i + 1, '('):
173 yield adjusttokenpos(t, coloffset)
196 fn = t.string
197
198 # *attr() builtins don't accept byte strings to 2nd argument.
199 if (fn in ('getattr', 'setattr', 'hasattr', 'safehasattr') and
200 not _isop(i - 1, '.')):
201 arg1idx = _findargnofcall(1)
202 if arg1idx is not None:
203 _ensureunicode(arg1idx)
204
205 # .encode() and .decode() on str/bytes/unicode don't accept
206 # byte strings on Python 3.
207 elif fn in ('encode', 'decode') and _isop(i - 1, '.'):
208 for argn in range(2):
209 argidx = _findargnofcall(argn)
210 if argidx is not None:
211 _ensureunicode(argidx)
212
213 # It changes iteritems/values to items/values as they are not
214 # present in Python 3 world.
215 elif fn in ('iteritems', 'itervalues'):
216 yield t._replace(string=fn[4:])
217 continue
218
174
219 # Emit unmodified token.
175 def process(fin, fout, opts):
220 yield t
176 tokens = tokenize.tokenize(fin.readline)
221
177 tokens = replacetokens(list(tokens), opts)
222 # Header to add to bytecode files. This MUST be changed when
178 fout.write(tokenize.untokenize(tokens))
223 # ``replacetoken`` or any mechanism that changes semantics of module
224 # loading is changed. Otherwise cached bytecode may get loaded without
225 # the new transformation mechanisms applied.
226 BYTECODEHEADER = b'HG\x00\x0a'
227
228 class hgloader(importlib.machinery.SourceFileLoader):
229 """Custom module loader that transforms source code.
230
179
231 When the source code is converted to a code object, we transform
180 def tryunlink(fname):
232 certain patterns to be Python 3 compatible. This allows us to write code
181 try:
233 that is natively Python 2 and compatible with Python 3 without
182 os.unlink(fname)
234 making the code excessively ugly.
183 except OSError as err:
235
184 if err.errno != errno.ENOENT:
236 We do this by transforming the token stream between parse and compile.
185 raise
237
238 Implementing transformations invalidates caching assumptions made
239 by the built-in importer. The built-in importer stores a header on
240 saved bytecode files indicating the Python/bytecode version. If the
241 version changes, the cached bytecode is ignored. The Mercurial
242 transformations could change at any time. This means we need to check
243 that cached bytecode was generated with the current transformation
244 code or there could be a mismatch between cached bytecode and what
245 would be generated from this class.
246
186
247 We supplement the bytecode caching layer by wrapping ``get_data``
187 @contextlib.contextmanager
248 and ``set_data``. These functions are called when the
188 def editinplace(fname):
249 ``SourceFileLoader`` retrieves and saves bytecode cache files,
189 n = os.path.basename(fname)
250 respectively. We simply add an additional header on the file. As
190 d = os.path.dirname(fname)
251 long as the version in this file is changed when semantics change,
191 fp = tempfile.NamedTemporaryFile(prefix='.%s-' % n, suffix='~', dir=d,
252 cached bytecode should be invalidated when transformations change.
192 delete=False)
253
193 try:
254 The added header has the form ``HG<VERSION>``. That is a literal
194 yield fp
255 ``HG`` with 2 binary bytes indicating the transformation version.
195 fp.close()
256 """
196 if os.name == 'nt':
257 def get_data(self, path):
197 tryunlink(fname)
258 data = super(hgloader, self).get_data(path)
198 os.rename(fp.name, fname)
259
199 finally:
260 if not path.endswith(tuple(importlib.machinery.BYTECODE_SUFFIXES)):
200 fp.close()
261 return data
201 tryunlink(fp.name)
262
263 # There should be a header indicating the Mercurial transformation
264 # version. If it doesn't exist or doesn't match the current version,
265 # we raise an OSError because that is what
266 # ``SourceFileLoader.get_code()`` expects when loading bytecode
267 # paths to indicate the cached file is "bad."
268 if data[0:2] != b'HG':
269 raise OSError('no hg header')
270 if data[0:4] != BYTECODEHEADER:
271 raise OSError('hg header version mismatch')
272
202
273 return data[4:]
203 def main():
274
204 ap = argparse.ArgumentParser()
275 def set_data(self, path, data, *args, **kwargs):
205 ap.add_argument('-i', '--inplace', action='store_true', default=False,
276 if path.endswith(tuple(importlib.machinery.BYTECODE_SUFFIXES)):
206 help='edit files in place')
277 data = BYTECODEHEADER + data
207 ap.add_argument('--dictiter', action='store_true', default=False,
278
208 help='rewrite iteritems() and itervalues()'),
279 return super(hgloader, self).set_data(path, data, *args, **kwargs)
209 ap.add_argument('files', metavar='FILE', nargs='+', help='source file')
210 args = ap.parse_args()
211 opts = {
212 'dictiter': args.dictiter,
213 }
214 for fname in args.files:
215 if args.inplace:
216 with editinplace(fname) as fout:
217 with open(fname, 'rb') as fin:
218 process(fin, fout, opts)
219 else:
220 with open(fname, 'rb') as fin:
221 fout = sys.stdout.buffer
222 process(fin, fout, opts)
280
223
281 def source_to_code(self, data, path):
224 if __name__ == '__main__':
282 """Perform token transformation before compilation."""
225 main()
283 buf = io.BytesIO(data)
284 tokens = tokenize.tokenize(buf.readline)
285 data = tokenize.untokenize(replacetokens(list(tokens), self.name))
286 # Python's built-in importer strips frames from exceptions raised
287 # for this code. Unfortunately, that mechanism isn't extensible
288 # and our frame will be blamed for the import failure. There
289 # are extremely hacky ways to do frame stripping. We haven't
290 # implemented them because they are very ugly.
291 return super(hgloader, self).source_to_code(data, path)
292
293 # We automagically register our custom importer as a side-effect of
294 # loading. This is necessary to ensure that any entry points are able
295 # to import mercurial.* modules without having to perform this
296 # registration themselves.
297 if not any(isinstance(x, hgpathentryfinder) for x in sys.meta_path):
298 # meta_path is used before any implicit finders and before sys.path.
299 sys.meta_path.insert(0, hgpathentryfinder())
@@ -340,7 +340,8 b' pypats = ['
340 (r'\butil\.Abort\b', "directly use error.Abort"),
340 (r'\butil\.Abort\b', "directly use error.Abort"),
341 (r'^@(\w*\.)?cachefunc', "module-level @cachefunc is risky, please avoid"),
341 (r'^@(\w*\.)?cachefunc', "module-level @cachefunc is risky, please avoid"),
342 (r'^import atexit', "don't use atexit, use ui.atexit"),
342 (r'^import atexit', "don't use atexit, use ui.atexit"),
343 (r'^import Queue', "don't use Queue, use util.queue + util.empty"),
343 (r'^import Queue', "don't use Queue, use pycompat.queue.Queue + "
344 "pycompat.queue.Empty"),
344 (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"),
345 (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"),
345 (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
346 (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
346 (r'^import SocketServer', "don't use SockerServer, use util.socketserver"),
347 (r'^import SocketServer', "don't use SockerServer, use util.socketserver"),
@@ -1,40 +1,81 b''
1 CC = clang
2 CXX = clang++
3
4 all: bdiff mpatch xdiff
5
6 fuzzutil.o: fuzzutil.cc fuzzutil.h
7 $(CXX) $(CXXFLAGS) -g -O1 -fsanitize=fuzzer-no-link,address \
8 -std=c++17 \
9 -I../../mercurial -c -o fuzzutil.o fuzzutil.cc
10
11 fuzzutil-oss-fuzz.o: fuzzutil.cc fuzzutil.h
12 $(CXX) $(CXXFLAGS) -std=c++17 \
13 -I../../mercurial -c -o fuzzutil-oss-fuzz.o fuzzutil.cc
14
1 bdiff.o: ../../mercurial/bdiff.c
15 bdiff.o: ../../mercurial/bdiff.c
2 clang -g -O1 -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
16 $(CC) $(CFLAGS) -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
3 ../../mercurial/bdiff.c
17 ../../mercurial/bdiff.c
4
18
5 bdiff: bdiff.cc bdiff.o
19 bdiff: bdiff.cc bdiff.o fuzzutil.o
6 clang -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
20 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
7 -I../../mercurial bdiff.cc bdiff.o -o bdiff
21 -std=c++17 \
22 -I../../mercurial bdiff.cc bdiff.o fuzzutil.o -o bdiff
8
23
9 bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
24 bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
10 $$CC $$CFLAGS -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
25 $(CC) $(CFLAGS) -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
26
27 bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o
28 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial bdiff.cc \
29 bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
30 $$OUT/bdiff_fuzzer
31
32 mpatch.o: ../../mercurial/mpatch.c
33 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c -o mpatch.o \
34 ../../mercurial/mpatch.c
11
35
12 bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o
36 mpatch: CXXFLAGS += -std=c++17
13 $$CXX $$CXXFLAGS -std=c++11 -I../../mercurial bdiff.cc \
37 mpatch: mpatch.cc mpatch.o fuzzutil.o
14 bdiff-oss-fuzz.o -lFuzzingEngine -o $$OUT/bdiff_fuzzer
38 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
39 -I../../mercurial mpatch.cc mpatch.o fuzzutil.o -o mpatch
40
41 mpatch-oss-fuzz.o: ../../mercurial/mpatch.c
42 $(CC) $(CFLAGS) -c -o mpatch-oss-fuzz.o ../../mercurial/mpatch.c
43
44 mpatch_fuzzer: mpatch.cc mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o
45 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial mpatch.cc \
46 mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
47 $$OUT/mpatch_fuzzer
48
49 mpatch_corpus.zip:
50 python mpatch_corpus.py $$OUT/mpatch_fuzzer_seed_corpus.zip
15
51
16 x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
52 x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
17 clang -g -O1 -fsanitize=fuzzer-no-link,address -c \
53 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c \
18 -o $@ \
54 -o $@ \
19 $<
55 $<
20
56
21 xdiff: xdiff.cc xdiffi.o xprepare.o xutils.o
57 xdiff: CXXFLAGS += -std=c++17
22 clang -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
58 xdiff: xdiff.cc xdiffi.o xprepare.o xutils.o fuzzutil.o
59 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
23 -I../../mercurial xdiff.cc \
60 -I../../mercurial xdiff.cc \
24 xdiffi.o xprepare.o xutils.o -o xdiff
61 xdiffi.o xprepare.o xutils.o fuzzutil.o -o xdiff
25
62
26 fuzz-x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
63 fuzz-x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
27 $$CC $$CFLAGS -c \
64 $(CC) $(CFLAGS) -c \
28 -o $@ \
65 -o $@ \
29 $<
66 $<
30
67
31 xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o
68 xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o
32 $$CXX $$CXXFLAGS -std=c++11 -I../../mercurial xdiff.cc \
69 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial xdiff.cc \
33 fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o \
70 fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o \
34 -lFuzzingEngine -o $$OUT/xdiff_fuzzer
71 -lFuzzingEngine -o $$OUT/xdiff_fuzzer
35
72
36 all: bdiff xdiff
73 clean:
74 $(RM) *.o *_fuzzer \
75 bdiff \
76 mpatch \
77 xdiff
37
78
38 oss-fuzz: bdiff_fuzzer xdiff_fuzzer
79 oss-fuzz: bdiff_fuzzer mpatch_fuzzer mpatch_corpus.zip xdiff_fuzzer
39
80
40 .PHONY: all oss-fuzz
81 .PHONY: all clean oss-fuzz
@@ -6,30 +6,25 b''
6 * This software may be used and distributed according to the terms of
6 * This software may be used and distributed according to the terms of
7 * the GNU General Public License, incorporated herein by reference.
7 * the GNU General Public License, incorporated herein by reference.
8 */
8 */
9 #include <memory>
9 #include <stdlib.h>
10 #include <stdlib.h>
10
11
12 #include "fuzzutil.h"
13
11 extern "C" {
14 extern "C" {
12 #include "bdiff.h"
15 #include "bdiff.h"
13
16
14 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
17 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
15 {
18 {
16 if (!Size) {
19 auto maybe_inputs = SplitInputs(Data, Size);
20 if (!maybe_inputs) {
17 return 0;
21 return 0;
18 }
22 }
19 // figure out a random point in [0, Size] to split our input.
23 auto inputs = std::move(maybe_inputs.value());
20 size_t split = Data[0] / 255.0 * Size;
21
22 // left input to diff is data[1:split]
23 const uint8_t *left = Data + 1;
24 // which has len split-1
25 size_t left_size = split - 1;
26 // right starts at the next byte after left ends
27 const uint8_t *right = left + left_size;
28 size_t right_size = Size - split;
29
24
30 struct bdiff_line *a, *b;
25 struct bdiff_line *a, *b;
31 int an = bdiff_splitlines((const char *)left, split - 1, &a);
26 int an = bdiff_splitlines(inputs.left.get(), inputs.left_size, &a);
32 int bn = bdiff_splitlines((const char *)right, right_size, &b);
27 int bn = bdiff_splitlines(inputs.right.get(), inputs.right_size, &b);
33 struct bdiff_hunk l;
28 struct bdiff_hunk l;
34 bdiff_diff(a, an, b, bn, &l);
29 bdiff_diff(a, an, b, bn, &l);
35 free(a);
30 free(a);
@@ -10,6 +10,8 b''
10 #include <inttypes.h>
10 #include <inttypes.h>
11 #include <stdlib.h>
11 #include <stdlib.h>
12
12
13 #include "fuzzutil.h"
14
13 extern "C" {
15 extern "C" {
14
16
15 int hunk_consumer(long a1, long a2, long b1, long b2, void *priv)
17 int hunk_consumer(long a1, long a2, long b1, long b2, void *priv)
@@ -20,21 +22,17 b' int hunk_consumer(long a1, long a2, long'
20
22
21 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
23 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
22 {
24 {
23 if (!Size) {
25 auto maybe_inputs = SplitInputs(Data, Size);
26 if (!maybe_inputs) {
24 return 0;
27 return 0;
25 }
28 }
26 // figure out a random point in [0, Size] to split our input.
29 auto inputs = std::move(maybe_inputs.value());
27 size_t split = Data[0] / 255.0 * Size;
28
29 mmfile_t a, b;
30 mmfile_t a, b;
30
31
31 // `a` input to diff is data[1:split]
32 a.ptr = inputs.left.get();
32 a.ptr = (char *)Data + 1;
33 a.size = inputs.left_size;
33 // which has len split-1
34 b.ptr = inputs.right.get();
34 a.size = split - 1;
35 b.size = inputs.right_size;
35 // `b` starts at the next byte after `a` ends
36 b.ptr = a.ptr + a.size;
37 b.size = Size - split;
38 xpparam_t xpp = {
36 xpparam_t xpp = {
39 XDF_INDENT_HEURISTIC, /* flags */
37 XDF_INDENT_HEURISTIC, /* flags */
40 };
38 };
@@ -117,9 +117,9 b' def main(argv):'
117 return
117 return
118 with open(opts.versionfile) as f:
118 with open(opts.versionfile) as f:
119 for l in f:
119 for l in f:
120 if l.startswith('version = '):
120 if l.startswith('version = b'):
121 # version number is entire line minus the quotes
121 # version number is entire line minus the quotes
122 ver = l[len('version = ') + 1:-2]
122 ver = l[len('version = b') + 1:-2]
123 break
123 break
124 if opts.paranoid:
124 if opts.paranoid:
125 print(paranoidver(ver))
125 print(paranoidver(ver))
@@ -39,10 +39,14 b' import hgdemandimport ; hgdemandimport.e'
39
39
40 from mercurial import (
40 from mercurial import (
41 dispatch,
41 dispatch,
42 pycompat,
42 ui as uimod,
43 ui as uimod,
43 )
44 )
44
45
45 def main():
46 def main():
47 # Prevent insertion/deletion of CRs
48 dispatch.initstdio()
49
46 cwd = os.getcwd()
50 cwd = os.getcwd()
47 readonly = False
51 readonly = False
48 args = sys.argv[1:]
52 args = sys.argv[1:]
@@ -66,15 +70,15 b' def main():'
66 path = cmdargv[2]
70 path = cmdargv[2]
67 repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
71 repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
68 if repo in allowed_paths:
72 if repo in allowed_paths:
69 cmd = ['-R', repo, 'serve', '--stdio']
73 cmd = [b'-R', pycompat.fsencode(repo), b'serve', b'--stdio']
70 req = dispatch.request(cmd)
74 req = dispatch.request(cmd)
71 if readonly:
75 if readonly:
72 if not req.ui:
76 if not req.ui:
73 req.ui = uimod.ui.load()
77 req.ui = uimod.ui.load()
74 req.ui.setconfig('hooks', 'pretxnopen.hg-ssh',
78 req.ui.setconfig(b'hooks', b'pretxnopen.hg-ssh',
75 'python:__main__.rejectpush', 'hg-ssh')
79 b'python:__main__.rejectpush', b'hg-ssh')
76 req.ui.setconfig('hooks', 'prepushkey.hg-ssh',
80 req.ui.setconfig(b'hooks', b'prepushkey.hg-ssh',
77 'python:__main__.rejectpush', 'hg-ssh')
81 b'python:__main__.rejectpush', b'hg-ssh')
78 dispatch.dispatch(req)
82 dispatch.dispatch(req)
79 else:
83 else:
80 sys.stderr.write('Illegal repository "%s"\n' % repo)
84 sys.stderr.write('Illegal repository "%s"\n' % repo)
@@ -84,7 +88,7 b' def main():'
84 sys.exit(255)
88 sys.exit(255)
85
89
86 def rejectpush(ui, **kwargs):
90 def rejectpush(ui, **kwargs):
87 ui.warn(("Permission denied\n"))
91 ui.warn((b"Permission denied\n"))
88 # mercurial hooks use unix process conventions for hook return values
92 # mercurial hooks use unix process conventions for hook return values
89 # so a truthy return means failure
93 # so a truthy return means failure
90 return True
94 return True
@@ -1,323 +1,144 b''
1 # If you want to change PREFIX, do not just edit it below. The changed
1 $(eval HGROOT := $(shell cd ../..; pwd))
2 # value wont get passed on to recursive make calls. You should instead
2
3 # override the variable on the command like:
3 DEBIAN_CODENAMES := \
4 #
4 jessie \
5 # % make PREFIX=/opt/ install
5 stretch \
6 buster
7
8 UBUNTU_CODENAMES := \
9 trusty \
10 xenial \
11 artful \
12 bionic \
6
13
7 export PREFIX=/usr/local
14 FEDORA_RELEASES := \
8 PYTHON=python
15 20 \
9 $(eval HGROOT := $(shell pwd))
16 21 \
10 HGPYTHONS ?= $(HGROOT)/build/pythons
17 28
11 PURE=
12 PYFILES:=$(shell find mercurial hgext doc -name '*.py')
13 DOCFILES=mercurial/help/*.txt
14 export LANGUAGE=C
15 export LC_ALL=C
16 TESTFLAGS ?= $(shell echo $$HGTESTFLAGS)
17 OSXVERSIONFLAGS ?= $(shell echo $$OSXVERSIONFLAGS)
18
18
19 # Set this to e.g. "mingw32" to use a non-default compiler.
19 CENTOS_RELEASES := \
20 COMPILER=
20 5 \
21 6 \
22 7
21
23
22 COMPILERFLAG_tmp_ =
24 # Build a Python for these CentOS releases.
23 COMPILERFLAG_tmp_${COMPILER} ?= -c $(COMPILER)
25 CENTOS_WITH_PYTHON_RELEASES := 5 6
24 COMPILERFLAG=${COMPILERFLAG_tmp_${COMPILER}}
25
26
26 help:
27 help:
27 @echo 'Commonly used make targets:'
28 @echo 'Packaging Make Targets'
28 @echo ' all - build program and documentation'
29 @echo ''
29 @echo ' install - install program and man pages to $$PREFIX ($(PREFIX))'
30 @echo 'docker-centos{$(strip $(CENTOS_RELEASES))}'
30 @echo ' install-home - install with setup.py install --home=$$HOME ($(HOME))'
31 @echo ' Build an RPM for a specific CentOS version using Docker.'
31 @echo ' local - build for inplace usage'
32 @echo ''
32 @echo ' tests - run all tests in the automatic test suite'
33 @echo 'docker-debian-{$(strip $(DEBIAN_CODENAMES))}'
33 @echo ' test-foo - run only specified tests (e.g. test-merge1.t)'
34 @echo ' Build Debian packages specific to a Debian distro using Docker.'
34 @echo ' dist - run all tests and create a source tarball in dist/'
35 @echo ''
35 @echo ' clean - remove files created by other targets'
36 @echo 'docker-fedora{$(strip $(FEDORA_RELEASES))}'
36 @echo ' (except installed files or dist source tarball)'
37 @echo ' Build an RPM for a specific Fedora version using Docker.'
37 @echo ' update-pot - update i18n/hg.pot'
38 @echo ''
38 @echo
39 @echo 'docker-ubuntu-{$(strip $(UBUNTU_CODENAMES))}'
39 @echo 'Example for a system-wide installation under /usr/local:'
40 @echo ' Build Debian package specific to an Ubuntu distro using Docker.'
40 @echo ' make all && su -c "make install" && hg version'
41 @echo ''
41 @echo
42 @echo 'docker-ubuntu-{$(strip $(UBUNTU_CODENAMES))}-ppa'
42 @echo 'Example for a local installation (usable in this directory):'
43 @echo ' Build a source-only Debian package specific to an Ubuntu distro'
43 @echo ' make local && ./hg version'
44 @echo ' using Docker.'
44
45 @echo ''
45 all: build doc
46 @echo 'linux-wheels'
46
47 @echo ' Build Linux manylinux wheels using Docker.'
47 local:
48 @echo ''
48 $(PYTHON) setup.py $(PURE) \
49 @echo 'linux-wheels-{x86_64, i686}'
49 build_py -c -d . \
50 @echo ' Build Linux manylinux wheels for a specific architecture using Docker'
50 build_ext $(COMPILERFLAG) -i \
51 @echo ''
51 build_hgexe $(COMPILERFLAG) -i \
52 @echo 'deb'
52 build_mo
53 @echo ' Build a Debian package locally targeting the current system'
53 env HGRCPATH= $(PYTHON) hg version
54 @echo ''
54
55 @echo 'ppa'
55 build:
56 @echo ' Build a Debian source package locally targeting the current system'
56 $(PYTHON) setup.py $(PURE) build $(COMPILERFLAG)
57 @echo ''
57
58 @echo 'centos{$(strip $(CENTOS_RELEASES))}'
58 wheel:
59 @echo ' Build an RPM for a specific CentOS version locally'
59 FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILERFLAG)
60 @echo ''
60
61 @echo 'fedora{$(strip $(FEDORA_RELEASES))}'
61 doc:
62 @echo ' Build an RPM for a specific Fedora version locally'
62 $(MAKE) -C doc
63
64 cleanbutpackages:
65 -$(PYTHON) setup.py clean --all # ignore errors from this command
66 find contrib doc hgext hgext3rd i18n mercurial tests hgdemandimport \
67 \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
68 rm -f MANIFEST MANIFEST.in hgext/__index__.py tests/*.err
69 rm -f mercurial/__modulepolicy__.py
70 if test -d .hg; then rm -f mercurial/__version__.py; fi
71 rm -rf build mercurial/locale
72 $(MAKE) -C doc clean
73 $(MAKE) -C contrib/chg distclean
74
75 clean: cleanbutpackages
76 rm -rf packages
77
78 install: install-bin install-doc
79
80 install-bin: build
81 $(PYTHON) setup.py $(PURE) install --root="$(DESTDIR)/" --prefix="$(PREFIX)" --force
82
83 install-doc: doc
84 cd doc && $(MAKE) $(MFLAGS) install
85
86 install-home: install-home-bin install-home-doc
87
88 install-home-bin: build
89 $(PYTHON) setup.py $(PURE) install --home="$(HOME)" --prefix="" --force
90
63
91 install-home-doc: doc
64 .PHONY: help
92 cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install
93
94 MANIFEST-doc:
95 $(MAKE) -C doc MANIFEST
96
97 MANIFEST.in: MANIFEST-doc
98 hg manifest | sed -e 's/^/include /' > MANIFEST.in
99 echo include mercurial/__version__.py >> MANIFEST.in
100 sed -e 's/^/include /' < doc/MANIFEST >> MANIFEST.in
101
102 dist: tests dist-notests
103
104 dist-notests: doc MANIFEST.in
105 TAR_OPTIONS="--owner=root --group=root --mode=u+w,go-w,a+rX-s" $(PYTHON) setup.py -q sdist
106
65
107 check: tests
66 .PHONY: deb
108
67 deb:
109 tests:
68 ./builddeb
110 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS)
111
112 test-%:
113 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS) $@
114
115 testpy-%:
116 @echo Looking for Python $* in $(HGPYTHONS)
117 [ -e $(HGPYTHONS)/$*/bin/python ] || ( \
118 cd $$(mktemp --directory --tmpdir) && \
119 $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
120 cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
121
122 check-code:
123 hg manifest | xargs python contrib/check-code.py
124
69
125 format-c:
70 .PHONY: ppa
126 clang-format --style file -i \
71 ppa:
127 `hg files 'set:(**.c or **.cc or **.h) and not "listfile:contrib/clang-format-ignorelist"'`
72 ./builddeb --source-only
128
129 update-pot: i18n/hg.pot
130
73
131 i18n/hg.pot: $(PYFILES) $(DOCFILES) i18n/posplit i18n/hggettext
74 # Debian targets.
132 $(PYTHON) i18n/hggettext mercurial/commands.py \
75 define debian_targets =
133 hgext/*.py hgext/*/__init__.py \
76 .PHONY: docker-debian-$(1)
134 mercurial/fileset.py mercurial/revset.py \
77 docker-debian-$(1):
135 mercurial/templatefilters.py \
78 ./dockerdeb debian $(1)
136 mercurial/templatefuncs.py \
79
137 mercurial/templatekw.py \
80 endef
138 mercurial/filemerge.py \
139 mercurial/hgweb/webcommands.py \
140 mercurial/util.py \
141 $(DOCFILES) > i18n/hg.pot.tmp
142 # All strings marked for translation in Mercurial contain
143 # ASCII characters only. But some files contain string
144 # literals like this '\037\213'. xgettext thinks it has to
145 # parse them even though they are not marked for translation.
146 # Extracting with an explicit encoding of ISO-8859-1 will make
147 # xgettext "parse" and ignore them.
148 echo $(PYFILES) | xargs \
149 xgettext --package-name "Mercurial" \
150 --msgid-bugs-address "<mercurial-devel@mercurial-scm.org>" \
151 --copyright-holder "Matt Mackall <mpm@selenic.com> and others" \
152 --from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \
153 -d hg -p i18n -o hg.pot.tmp
154 $(PYTHON) i18n/posplit i18n/hg.pot.tmp
155 # The target file is not created before the last step. So it never is in
156 # an intermediate state.
157 mv -f i18n/hg.pot.tmp i18n/hg.pot
158
81
159 %.po: i18n/hg.pot
82 $(foreach codename,$(DEBIAN_CODENAMES),$(eval $(call debian_targets,$(codename))))
160 # work on a temporary copy for never having a half completed target
161 cp $@ $@.tmp
162 msgmerge --no-location --update $@.tmp $^
163 mv -f $@.tmp $@
164
83
165 # Packaging targets
84 # Ubuntu targets.
85 define ubuntu_targets =
86 .PHONY: docker-ubuntu-$(1)
87 docker-ubuntu-$(1):
88 ./dockerdeb ubuntu $(1)
89
90 .PHONY: docker-ubuntu-$(1)-ppa
91 docker-ubuntu-$(1)-ppa:
92 ./dockerdeb ubuntu $(1) --source-only
166
93
167 osx:
94 endef
168 rm -rf build/mercurial
95
169 /usr/bin/python2.7 setup.py install --optimize=1 \
96 $(foreach codename,$(UBUNTU_CODENAMES),$(eval $(call ubuntu_targets,$(codename))))
170 --root=build/mercurial/ --prefix=/usr/local/ \
171 --install-lib=/Library/Python/2.7/site-packages/
172 make -C doc all install DESTDIR="$(PWD)/build/mercurial/"
173 # Place a bogon .DS_Store file in the target dir so we can be
174 # sure it doesn't get included in the final package.
175 touch build/mercurial/.DS_Store
176 # install zsh completions - this location appears to be
177 # searched by default as of macOS Sierra.
178 install -d build/mercurial/usr/local/share/zsh/site-functions/
179 install -m 0644 contrib/zsh_completion build/mercurial/usr/local/share/zsh/site-functions/_hg
180 # install bash completions - there doesn't appear to be a
181 # place that's searched by default for bash, so we'll follow
182 # the lead of Apple's git install and just put it in a
183 # location of our own.
184 install -d build/mercurial/usr/local/hg/contrib/
185 install -m 0644 contrib/bash_completion build/mercurial/usr/local/hg/contrib/hg-completion.bash
186 make -C contrib/chg \
187 HGPATH=/usr/local/bin/hg \
188 PYTHON=/usr/bin/python2.7 \
189 HGEXTDIR=/Library/Python/2.7/site-packages/hgext \
190 DESTDIR=../../build/mercurial \
191 PREFIX=/usr/local \
192 clean install
193 mkdir -p $${OUTPUTDIR:-dist}
194 HGVER=$$(python contrib/genosxversion.py $(OSXVERSIONFLAGS) build/mercurial/Library/Python/2.7/site-packages/mercurial/__version__.py) && \
195 OSXVER=$$(sw_vers -productVersion | cut -d. -f1,2) && \
196 pkgbuild --filter \\.DS_Store --root build/mercurial/ \
197 --identifier org.mercurial-scm.mercurial \
198 --version "$${HGVER}" \
199 build/mercurial.pkg && \
200 productbuild --distribution contrib/macosx/distribution.xml \
201 --package-path build/ \
202 --version "$${HGVER}" \
203 --resources contrib/macosx/ \
204 "$${OUTPUTDIR:-dist/}"/Mercurial-"$${HGVER}"-macosx"$${OSXVER}".pkg
205
97
206 deb:
98 # Fedora targets.
207 contrib/builddeb
99 define fedora_targets
208
100 .PHONY: fedora$(1)
209 ppa:
101 fedora$(1):
210 contrib/builddeb --source-only
102 mkdir -p $$(HGROOT)/packages/fedora$(1)
211
103 ./buildrpm
212 contrib/docker/debian-%: contrib/docker/debian.template
104 cp $$(HGROOT)/contrib/packaging/rpmbuild/RPMS/*/* $$(HGROOT)/packages/fedora$(1)
213 sed "s/__CODENAME__/$*/" $< > $@
105 cp $$(HGROOT)/contrib/packaging/rpmbuild/SRPMS/* $$(HGROOT)/packages/fedora$(1)
214
106 rm -rf $(HGROOT)/rpmbuild
215 docker-debian-jessie: contrib/docker/debian-jessie
216 contrib/dockerdeb debian jessie
217
218 docker-debian-stretch: contrib/docker/debian-stretch
219 contrib/dockerdeb debian stretch
220
221 contrib/docker/ubuntu-%: contrib/docker/ubuntu.template
222 sed "s/__CODENAME__/$*/" $< > $@
223
224 docker-ubuntu-trusty: contrib/docker/ubuntu-trusty
225 contrib/dockerdeb ubuntu trusty
226
227 docker-ubuntu-trusty-ppa: contrib/docker/ubuntu-trusty
228 contrib/dockerdeb ubuntu trusty --source-only
229
107
230 docker-ubuntu-xenial: contrib/docker/ubuntu-xenial
108 .PHONY: docker-fedora$(1)
231 contrib/dockerdeb ubuntu xenial
109 docker-fedora$(1):
232
110 mkdir -p $$(HGROOT)/packages/fedora$(1)
233 docker-ubuntu-xenial-ppa: contrib/docker/ubuntu-xenial
111 ./dockerrpm fedora$(1)
234 contrib/dockerdeb ubuntu xenial --source-only
235
236 docker-ubuntu-artful: contrib/docker/ubuntu-artful
237 contrib/dockerdeb ubuntu artful
238
239 docker-ubuntu-artful-ppa: contrib/docker/ubuntu-artful
240 contrib/dockerdeb ubuntu artful --source-only
241
112
242 docker-ubuntu-bionic: contrib/docker/ubuntu-bionic
113 endef
243 contrib/dockerdeb ubuntu bionic
244
245 docker-ubuntu-bionic-ppa: contrib/docker/ubuntu-bionic
246 contrib/dockerdeb ubuntu bionic --source-only
247
114
248 fedora20:
115 $(foreach release,$(FEDORA_RELEASES),$(eval $(call fedora_targets,$(release))))
249 mkdir -p packages/fedora20
250 contrib/buildrpm
251 cp rpmbuild/RPMS/*/* packages/fedora20
252 cp rpmbuild/SRPMS/* packages/fedora20
253 rm -rf rpmbuild
254
255 docker-fedora20:
256 mkdir -p packages/fedora20
257 contrib/dockerrpm fedora20
258
116
259 fedora21:
117 # CentOS targets.
260 mkdir -p packages/fedora21
118 define centos_targets
261 contrib/buildrpm
119 .PHONY: centos$(1)
262 cp rpmbuild/RPMS/*/* packages/fedora21
120 centos$(1):
263 cp rpmbuild/SRPMS/* packages/fedora21
121 mkdir -p $$(HGROOT)/packages/centos$(1)
264 rm -rf rpmbuild
122 ./buildrpm $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython)
265
123 cp $$(HGROOT)/rpmbuild/RPMS/*/* $$(HGROOT)/packages/centos$(1)
266 docker-fedora21:
124 cp $$(HGROOT)/rpmbuild/SRPMS/* $$(HGROOT)/packages/centos$(1)
267 mkdir -p packages/fedora21
268 contrib/dockerrpm fedora21
269
270 centos5:
271 mkdir -p packages/centos5
272 contrib/buildrpm --withpython
273 cp rpmbuild/RPMS/*/* packages/centos5
274 cp rpmbuild/SRPMS/* packages/centos5
275
276 docker-centos5:
277 mkdir -p packages/centos5
278 contrib/dockerrpm centos5 --withpython
279
125
280 centos6:
126 .PHONY: docker-centos$(1)
281 mkdir -p packages/centos6
127 docker-centos$(1):
282 contrib/buildrpm --withpython
128 mkdir -p $$(HGROOT)/packages/centos$(1)
283 cp rpmbuild/RPMS/*/* packages/centos6
129 ./dockerrpm centos$(1) $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython)
284 cp rpmbuild/SRPMS/* packages/centos6
285
286 docker-centos6:
287 mkdir -p packages/centos6
288 contrib/dockerrpm centos6 --withpython
289
130
290 centos7:
131 endef
291 mkdir -p packages/centos7
292 contrib/buildrpm
293 cp rpmbuild/RPMS/*/* packages/centos7
294 cp rpmbuild/SRPMS/* packages/centos7
295
132
296 docker-centos7:
133 $(foreach release,$(CENTOS_RELEASES),$(eval $(call centos_targets,$(release))))
297 mkdir -p packages/centos7
298 contrib/dockerrpm centos7
299
134
135 .PHONY: linux-wheels
300 linux-wheels: linux-wheels-x86_64 linux-wheels-i686
136 linux-wheels: linux-wheels-x86_64 linux-wheels-i686
301
137
138 .PHONY: linux-wheels-x86_64
302 linux-wheels-x86_64:
139 linux-wheels-x86_64:
303 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`:/src quay.io/pypa/manylinux1_x86_64 /src/contrib/build-linux-wheels.sh
140 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`/../..:/src quay.io/pypa/manylinux1_x86_64 /src/contrib/packaging/build-linux-wheels.sh
304
305 linux-wheels-i686:
306 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`:/src quay.io/pypa/manylinux1_i686 linux32 /src/contrib/build-linux-wheels.sh
307
141
308 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
142 .PHONY: linux-wheels-i686
309 install-doc install-home install-home-bin install-home-doc \
143 linux-wheels-i686:
310 dist dist-notests check tests check-code format-c update-pot \
144 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`/../..:/src quay.io/pypa/manylinux1_i686 linux32 /src/contrib/packaging/build-linux-wheels.sh
311 osx deb ppa \
312 docker-debian-jessie \
313 docker-debian-stretch \
314 docker-ubuntu-trusty docker-ubuntu-trusty-ppa \
315 docker-ubuntu-xenial docker-ubuntu-xenial-ppa \
316 docker-ubuntu-artful docker-ubuntu-artful-ppa \
317 docker-ubuntu-bionic docker-ubuntu-bionic-ppa \
318 fedora20 docker-fedora20 \
319 fedora21 docker-fedora21 \
320 centos5 docker-centos5 \
321 centos6 docker-centos6 \
322 centos7 docker-centos7 \
323 linux-wheels
@@ -30,5 +30,5 b' for PYBIN in $PYTHON_TARGETS; do'
30 # Install mercurial wheel as root
30 # Install mercurial wheel as root
31 "${PYBIN}/pip" install mercurial --no-index -f /src/wheelhouse
31 "${PYBIN}/pip" install mercurial --no-index -f /src/wheelhouse
32 # But run tests as hgbuilder user (non-root)
32 # But run tests as hgbuilder user (non-root)
33 su hgbuilder -c "\"${PYBIN}/python\" /io/tests/run-tests.py --with-hg=\"${PYBIN}/hg\" --blacklist=/io/contrib/linux-wheel-centos5-blacklist"
33 su hgbuilder -c "\"${PYBIN}/python\" /io/tests/run-tests.py --with-hg=\"${PYBIN}/hg\" --blacklist=/io/contrib/packaging/linux-wheel-centos5-blacklist"
34 done
34 done
@@ -6,6 +6,8 b''
6
6
7 . $(dirname $0)/packagelib.sh
7 . $(dirname $0)/packagelib.sh
8
8
9 ROOTDIR=$(cd $(dirname $0)/../.. > /dev/null; pwd)
10
9 BUILD=1
11 BUILD=1
10 CLEANUP=1
12 CLEANUP=1
11 DISTID=`(lsb_release -is 2> /dev/null | tr '[:upper:]' '[:lower:]') || echo debian`
13 DISTID=`(lsb_release -is 2> /dev/null | tr '[:upper:]' '[:lower:]') || echo debian`
@@ -73,7 +75,7 b' if [ "$BUILD" ]; then'
73 exit 1
75 exit 1
74 fi
76 fi
75
77
76 cp -r "$PWD"/contrib/debian debian
78 cp -r "$ROOTDIR"/contrib/packaging/debian debian
77
79
78 sed -i.tmp "s/__VERSION__/$debver/" $changelog
80 sed -i.tmp "s/__VERSION__/$debver/" $changelog
79 sed -i.tmp "s/__DATE__/$(date --rfc-2822)/" $changelog
81 sed -i.tmp "s/__DATE__/$(date --rfc-2822)/" $changelog
@@ -82,7 +84,7 b' if [ "$BUILD" ]; then'
82
84
83 # remove the node from the version string
85 # remove the node from the version string
84 SRCFILE="mercurial_$(echo $debver | sed "s,-$node,,").orig.tar.gz"
86 SRCFILE="mercurial_$(echo $debver | sed "s,-$node,,").orig.tar.gz"
85 "$PWD/hg" archive $SRCFILE
87 "$ROOTDIR/hg" archive $SRCFILE
86 mv $SRCFILE ..
88 mv $SRCFILE ..
87 debuild -us -uc -i -I $DEBFLAGS
89 debuild -us -uc -i -I $DEBFLAGS
88 if [ $? != 0 ]; then
90 if [ $? != 0 ]; then
@@ -35,9 +35,9 b' while [ "$1" ]; do'
35 esac
35 esac
36 done
36 done
37
37
38 cd "`dirname $0`/.."
38 cd "`dirname $0`/../.."
39
39
40 specfile=$PWD/contrib/mercurial.spec
40 specfile=$PWD/contrib/packaging/mercurial.spec
41 if [ ! -f $specfile ]; then
41 if [ ! -f $specfile ]; then
42 echo "Cannot find $specfile!" 1>&2
42 echo "Cannot find $specfile!" 1>&2
43 exit 1
43 exit 1
1 NO CONTENT: file renamed from contrib/debian/cacerts.rc to contrib/packaging/debian/cacerts.rc
NO CONTENT: file renamed from contrib/debian/cacerts.rc to contrib/packaging/debian/cacerts.rc
1 NO CONTENT: file renamed from contrib/debian/changelog to contrib/packaging/debian/changelog
NO CONTENT: file renamed from contrib/debian/changelog to contrib/packaging/debian/changelog
1 NO CONTENT: file renamed from contrib/debian/compat to contrib/packaging/debian/compat
NO CONTENT: file renamed from contrib/debian/compat to contrib/packaging/debian/compat
1 NO CONTENT: file renamed from contrib/debian/control to contrib/packaging/debian/control
NO CONTENT: file renamed from contrib/debian/control to contrib/packaging/debian/control
1 NO CONTENT: file renamed from contrib/debian/copyright to contrib/packaging/debian/copyright
NO CONTENT: file renamed from contrib/debian/copyright to contrib/packaging/debian/copyright
1 NO CONTENT: file renamed from contrib/debian/default-tools.rc to contrib/packaging/debian/default-tools.rc
NO CONTENT: file renamed from contrib/debian/default-tools.rc to contrib/packaging/debian/default-tools.rc
1 NO CONTENT: file renamed from contrib/debian/hgkpath.rc to contrib/packaging/debian/hgkpath.rc
NO CONTENT: file renamed from contrib/debian/hgkpath.rc to contrib/packaging/debian/hgkpath.rc
@@ -35,7 +35,7 b' override_dh_install:'
35 mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/mercurial
35 mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/mercurial
36 cp contrib/hgk "$(CURDIR)"/debian/mercurial-common/usr/share/mercurial
36 cp contrib/hgk "$(CURDIR)"/debian/mercurial-common/usr/share/mercurial
37 mkdir -p "$(CURDIR)"/debian/mercurial-common/etc/mercurial/hgrc.d/
37 mkdir -p "$(CURDIR)"/debian/mercurial-common/etc/mercurial/hgrc.d/
38 cp contrib/debian/*.rc "$(CURDIR)"/debian/mercurial-common/etc/mercurial/hgrc.d/
38 cp contrib/packaging/debian/*.rc "$(CURDIR)"/debian/mercurial-common/etc/mercurial/hgrc.d/
39 # completions
39 # completions
40 mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/bash-completion/completions
40 mkdir -p "$(CURDIR)"/debian/mercurial-common/usr/share/bash-completion/completions
41 cp contrib/bash_completion "$(CURDIR)"/debian/mercurial-common/usr/share/bash-completion/completions/hg
41 cp contrib/bash_completion "$(CURDIR)"/debian/mercurial-common/usr/share/bash-completion/completions/hg
@@ -1,4 +1,8 b''
1 FROM centos:centos5
1 FROM centos:centos5
2
3 RUN groupadd -g 1000 build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
5
2 RUN \
6 RUN \
3 sed -i 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo && \
7 sed -i 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo && \
4 sed -i 's/^#\(baseurl=\)http:\/\/mirror.centos.org\/centos/\1http:\/\/vault.centos.org/' /etc/yum.repos.d/*.repo && \
8 sed -i 's/^#\(baseurl=\)http:\/\/mirror.centos.org\/centos/\1http:\/\/vault.centos.org/' /etc/yum.repos.d/*.repo && \
@@ -1,4 +1,8 b''
1 FROM centos:centos6
1 FROM centos:centos6
2
3 RUN groupadd -g 1000 build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
5
2 RUN yum install -y \
6 RUN yum install -y \
3 gcc \
7 gcc \
4 gettext \
8 gettext \
@@ -1,4 +1,8 b''
1 FROM centos:centos7
1 FROM centos:centos7
2
3 RUN groupadd -g 1000 build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
5
2 RUN yum install -y \
6 RUN yum install -y \
3 gcc \
7 gcc \
4 gettext \
8 gettext \
@@ -1,4 +1,8 b''
1 FROM debian:__CODENAME__
1 FROM debian:%CODENAME%
2
3 RUN groupadd -g 1000 build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
5
2 RUN apt-get update && apt-get install -y \
6 RUN apt-get update && apt-get install -y \
3 build-essential \
7 build-essential \
4 debhelper \
8 debhelper \
@@ -1,4 +1,8 b''
1 FROM fedora:20
1 FROM fedora:20
2
3 RUN groupadd -g 1000 build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
5
2 RUN yum install -y \
6 RUN yum install -y \
3 gcc \
7 gcc \
4 gettext \
8 gettext \
@@ -1,4 +1,8 b''
1 FROM fedora:21
1 FROM fedora:21
2
3 RUN groupadd -g 1000 build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
5
2 RUN yum install -y \
6 RUN yum install -y \
3 gcc \
7 gcc \
4 gettext \
8 gettext \
@@ -1,4 +1,8 b''
1 FROM ubuntu:__CODENAME__
1 FROM ubuntu:%CODENAME%
2
3 RUN groupadd -g 1000 build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
5
2 RUN apt-get update && apt-get install -y \
6 RUN apt-get update && apt-get install -y \
3 build-essential \
7 build-essential \
4 debhelper \
8 debhelper \
@@ -1,12 +1,9 b''
1 #!/bin/bash -eu
1 #!/bin/bash -eu
2
2
3 . $(dirname $0)/dockerlib.sh
4 . $(dirname $0)/packagelib.sh
3 . $(dirname $0)/packagelib.sh
5
4
6 BUILDDIR=$(dirname $0)
5 BUILDDIR=$(dirname $0)
7 export ROOTDIR=$(cd $BUILDDIR/.. > /dev/null; pwd)
6 export ROOTDIR=$(cd $BUILDDIR/../.. > /dev/null; pwd)
8
9 checkdocker
10
7
11 DISTID="$1"
8 DISTID="$1"
12 CODENAME="$2"
9 CODENAME="$2"
@@ -14,21 +11,29 b' PLATFORM="$1-$2"'
14 shift; shift # extra params are passed to build process
11 shift; shift # extra params are passed to build process
15
12
16 OUTPUTDIR=${OUTPUTDIR:=$ROOTDIR/packages/$PLATFORM}
13 OUTPUTDIR=${OUTPUTDIR:=$ROOTDIR/packages/$PLATFORM}
14 CONTAINER=hg-docker-$PLATFORM
17
15
18 initcontainer $PLATFORM
16 DOCKER=$($BUILDDIR/hg-docker docker-path)
17
18 $BUILDDIR/hg-docker build \
19 --build-arg CODENAME=$CODENAME \
20 $BUILDDIR/docker/$DISTID.template \
21 $CONTAINER
19
22
20 # debuild only appears to be able to save built debs etc to .., so we
23 # debuild only appears to be able to save built debs etc to .., so we
21 # have to share the .. of the current directory with the docker
24 # have to share the .. of the current directory with the docker
22 # container and hope it's writable. Whee.
25 # container and hope it's writable. Whee.
23 dn=$(basename $PWD)
26 dn=$(basename $ROOTDIR)
27
28 DBUILDUSER=build
24
29
25 if [ $(uname) = "Darwin" ] ; then
30 if [ $(uname) = "Darwin" ] ; then
26 $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
31 $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
27 sh -c "cd /mnt/$dn && make clean && make local"
32 sh -c "cd /mnt/$dn && make clean && make local"
28 fi
33 fi
29 $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
34 $DOCKER run -u $DBUILDUSER --rm -v $ROOTDIR/..:/mnt $CONTAINER \
30 sh -c "cd /mnt/$dn && DEB_BUILD_OPTIONS='${DEB_BUILD_OPTIONS:=}' contrib/builddeb --build --distid $DISTID --codename $CODENAME $@"
35 sh -c "cd /mnt/$dn && DEB_BUILD_OPTIONS='${DEB_BUILD_OPTIONS:=}' contrib/packaging/builddeb --build --distid $DISTID --codename $CODENAME $@"
31 contrib/builddeb --cleanup --distid $DISTID --codename $CODENAME
36 contrib/packaging/builddeb --cleanup --distid $DISTID --codename $CODENAME
32 if [ $(uname) = "Darwin" ] ; then
37 if [ $(uname) = "Darwin" ] ; then
33 $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
38 $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
34 sh -c "cd /mnt/$dn && make clean"
39 sh -c "cd /mnt/$dn && make clean"
@@ -1,21 +1,23 b''
1 #!/bin/bash -e
1 #!/bin/bash -e
2
2
3 . $(dirname $0)/dockerlib.sh
4
5 BUILDDIR=$(dirname $0)
3 BUILDDIR=$(dirname $0)
6 export ROOTDIR=$(cd $BUILDDIR/..; pwd)
4 export ROOTDIR=$(cd $BUILDDIR/../..; pwd)
7
8 checkdocker
9
5
10 PLATFORM="$1"
6 PLATFORM="$1"
11 shift # extra params are passed to buildrpm
7 shift # extra params are passed to buildrpm
12
8
13 initcontainer $PLATFORM
9 DOCKER=$($BUILDDIR/hg-docker docker-path)
10
11 CONTAINER=hg-docker-$PLATFORM
12
13 $BUILDDIR/hg-docker build $BUILDDIR/docker/$PLATFORM $CONTAINER
14
14
15 RPMBUILDDIR=$ROOTDIR/packages/$PLATFORM
15 RPMBUILDDIR=$ROOTDIR/packages/$PLATFORM
16 contrib/buildrpm --rpmbuilddir $RPMBUILDDIR --prepare $*
16 $ROOTDIR/contrib/packaging/buildrpm --rpmbuilddir $RPMBUILDDIR --prepare $*
17
17
18 DSHARED=/mnt/shared
18 DSHARED=/mnt/shared
19 DBUILDUSER=build
20
19 $DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
21 $DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
20 rpmbuild --define "_topdir $DSHARED" -ba $DSHARED/SPECS/mercurial.spec --clean
22 rpmbuild --define "_topdir $DSHARED" -ba $DSHARED/SPECS/mercurial.spec --clean
21
23
1 NO CONTENT: file renamed from contrib/linux-wheel-centos5-blacklist to contrib/packaging/linux-wheel-centos5-blacklist
NO CONTENT: file renamed from contrib/linux-wheel-centos5-blacklist to contrib/packaging/linux-wheel-centos5-blacklist
1 NO CONTENT: file renamed from contrib/macosx/Readme.html to contrib/packaging/macosx/Readme.html
NO CONTENT: file renamed from contrib/macosx/Readme.html to contrib/packaging/macosx/Readme.html
1 NO CONTENT: file renamed from contrib/macosx/Welcome.html to contrib/packaging/macosx/Welcome.html
NO CONTENT: file renamed from contrib/macosx/Welcome.html to contrib/packaging/macosx/Welcome.html
@@ -4,7 +4,7 b''
4 <organization>org.mercurial-scm</organization>
4 <organization>org.mercurial-scm</organization>
5 <options customize="never" require-scripts="false" rootVolumeOnly="true" />
5 <options customize="never" require-scripts="false" rootVolumeOnly="true" />
6 <welcome file="Welcome.html" mime-type="text/html" />
6 <welcome file="Welcome.html" mime-type="text/html" />
7 <license file="../../COPYING" mime-type="text/plain" />
7 <license file="../../../COPYING" mime-type="text/plain" />
8 <readme file="Readme.html" mime-type="text/html" />
8 <readme file="Readme.html" mime-type="text/html" />
9 <pkg-ref id="org.mercurial-scm.mercurial"
9 <pkg-ref id="org.mercurial-scm.mercurial"
10 version="0"
10 version="0"
1 NO CONTENT: file renamed from contrib/mercurial.spec to contrib/packaging/mercurial.spec
NO CONTENT: file renamed from contrib/mercurial.spec to contrib/packaging/mercurial.spec
1 NO CONTENT: file renamed from contrib/packagelib.sh to contrib/packaging/packagelib.sh
NO CONTENT: file renamed from contrib/packagelib.sh to contrib/packaging/packagelib.sh
@@ -71,6 +71,25 b' except (ImportError, AttributeError):'
71 import inspect
71 import inspect
72 getargspec = inspect.getargspec
72 getargspec = inspect.getargspec
73
73
74 try:
75 # 4.7+
76 queue = pycompat.queue.Queue
77 except (AttributeError, ImportError):
78 # <4.7.
79 try:
80 queue = pycompat.queue
81 except (AttributeError, ImportError):
82 queue = util.queue
83
84 try:
85 from mercurial import logcmdutil
86 makelogtemplater = logcmdutil.maketemplater
87 except (AttributeError, ImportError):
88 try:
89 makelogtemplater = cmdutil.makelogtemplater
90 except (AttributeError, ImportError):
91 makelogtemplater = None
92
74 # for "historical portability":
93 # for "historical portability":
75 # define util.safehasattr forcibly, because util.safehasattr has been
94 # define util.safehasattr forcibly, because util.safehasattr has been
76 # available since 1.9.3 (or 94b200a11cf7)
95 # available since 1.9.3 (or 94b200a11cf7)
@@ -159,6 +178,9 b' try:'
159 configitem('perf', 'parentscount',
178 configitem('perf', 'parentscount',
160 default=mercurial.configitems.dynamicdefault,
179 default=mercurial.configitems.dynamicdefault,
161 )
180 )
181 configitem('perf', 'all-timing',
182 default=mercurial.configitems.dynamicdefault,
183 )
162 except (ImportError, AttributeError):
184 except (ImportError, AttributeError):
163 pass
185 pass
164
186
@@ -228,12 +250,15 b' def gettimer(ui, opts=None):'
228 # experimental config: perf.stub
250 # experimental config: perf.stub
229 if ui.configbool("perf", "stub", False):
251 if ui.configbool("perf", "stub", False):
230 return functools.partial(stub_timer, fm), fm
252 return functools.partial(stub_timer, fm), fm
231 return functools.partial(_timer, fm), fm
253
254 # experimental config: perf.all-timing
255 displayall = ui.configbool("perf", "all-timing", False)
256 return functools.partial(_timer, fm, displayall=displayall), fm
232
257
233 def stub_timer(fm, func, title=None):
258 def stub_timer(fm, func, title=None):
234 func()
259 func()
235
260
236 def _timer(fm, func, title=None):
261 def _timer(fm, func, title=None, displayall=False):
237 gc.collect()
262 gc.collect()
238 results = []
263 results = []
239 begin = util.timer()
264 begin = util.timer()
@@ -258,14 +283,27 b' def _timer(fm, func, title=None):'
258 fm.write('title', '! %s\n', title)
283 fm.write('title', '! %s\n', title)
259 if r:
284 if r:
260 fm.write('result', '! result: %s\n', r)
285 fm.write('result', '! result: %s\n', r)
261 m = min(results)
286 def display(role, entry):
262 fm.plain('!')
287 prefix = ''
263 fm.write('wall', ' wall %f', m[0])
288 if role != 'best':
264 fm.write('comb', ' comb %f', m[1] + m[2])
289 prefix = '%s.' % role
265 fm.write('user', ' user %f', m[1])
290 fm.plain('!')
266 fm.write('sys', ' sys %f', m[2])
291 fm.write(prefix + 'wall', ' wall %f', entry[0])
267 fm.write('count', ' (best of %d)', count)
292 fm.write(prefix + 'comb', ' comb %f', entry[1] + entry[2])
268 fm.plain('\n')
293 fm.write(prefix + 'user', ' user %f', entry[1])
294 fm.write(prefix + 'sys', ' sys %f', entry[2])
295 fm.write(prefix + 'count', ' (%s of %d)', role, count)
296 fm.plain('\n')
297 results.sort()
298 min_val = results[0]
299 display('best', min_val)
300 if displayall:
301 max_val = results[-1]
302 display('max', max_val)
303 avg = tuple([sum(x) / count for x in zip(*results)])
304 display('avg', avg)
305 median = results[len(results) // 2]
306 display('median', median)
269
307
270 # utilities for historical portability
308 # utilities for historical portability
271
309
@@ -755,6 +793,10 b' def perfphases(ui, repo, **opts):'
755
793
756 @command('perfmanifest', [], 'REV')
794 @command('perfmanifest', [], 'REV')
757 def perfmanifest(ui, repo, rev, **opts):
795 def perfmanifest(ui, repo, rev, **opts):
796 """benchmark the time to read a manifest from disk and return a usable
797 dict-like object
798
799 Manifest caches are cleared before retrieval."""
758 timer, fm = gettimer(ui, opts)
800 timer, fm = gettimer(ui, opts)
759 ctx = scmutil.revsingle(repo, rev, rev)
801 ctx = scmutil.revsingle(repo, rev, rev)
760 t = ctx.manifestnode()
802 t = ctx.manifestnode()
@@ -887,16 +929,36 b' def perfmoonwalk(ui, repo, **opts):'
887 timer(moonwalk)
929 timer(moonwalk)
888 fm.end()
930 fm.end()
889
931
890 @command('perftemplating', formatteropts)
932 @command('perftemplating',
891 def perftemplating(ui, repo, rev=None, **opts):
933 [('r', 'rev', [], 'revisions to run the template on'),
892 if rev is None:
934 ] + formatteropts)
893 rev=[]
935 def perftemplating(ui, repo, testedtemplate=None, **opts):
936 """test the rendering time of a given template"""
937 if makelogtemplater is None:
938 raise error.Abort(("perftemplating not available with this Mercurial"),
939 hint="use 4.3 or later")
940
941 nullui = ui.copy()
942 nullui.fout = open(os.devnull, 'wb')
943 nullui.disablepager()
944 revs = opts.get('rev')
945 if not revs:
946 revs = ['all()']
947 revs = list(scmutil.revrange(repo, revs))
948
949 defaulttemplate = ('{date|shortdate} [{rev}:{node|short}]'
950 ' {author|person}: {desc|firstline}\n')
951 if testedtemplate is None:
952 testedtemplate = defaulttemplate
953 displayer = makelogtemplater(nullui, repo, testedtemplate)
954 def format():
955 for r in revs:
956 ctx = repo[r]
957 displayer.show(ctx)
958 displayer.flush(ctx)
959
894 timer, fm = gettimer(ui, opts)
960 timer, fm = gettimer(ui, opts)
895 ui.pushbuffer()
961 timer(format)
896 timer(lambda: commands.log(ui, repo, rev=rev, date='', user='',
897 template='{date|shortdate} [{rev}:{node|short}]'
898 ' {author|person}: {desc|firstline}\n'))
899 ui.popbuffer()
900 fm.end()
962 fm.end()
901
963
902 @command('perfcca', formatteropts)
964 @command('perfcca', formatteropts)
@@ -918,9 +980,10 b' def perffncacheload(ui, repo, **opts):'
918 def perffncachewrite(ui, repo, **opts):
980 def perffncachewrite(ui, repo, **opts):
919 timer, fm = gettimer(ui, opts)
981 timer, fm = gettimer(ui, opts)
920 s = repo.store
982 s = repo.store
983 lock = repo.lock()
921 s.fncache._load()
984 s.fncache._load()
922 lock = repo.lock()
923 tr = repo.transaction('perffncachewrite')
985 tr = repo.transaction('perffncachewrite')
986 tr.addbackup('fncache')
924 def d():
987 def d():
925 s.fncache._dirty = True
988 s.fncache._dirty = True
926 s.fncache.write(tr)
989 s.fncache.write(tr)
@@ -1029,7 +1092,7 b' def perfbdiff(ui, repo, file_, rev=None,'
1029 else:
1092 else:
1030 mdiff.textdiff(*pair)
1093 mdiff.textdiff(*pair)
1031 else:
1094 else:
1032 q = util.queue()
1095 q = queue()
1033 for i in xrange(threads):
1096 for i in xrange(threads):
1034 q.put(None)
1097 q.put(None)
1035 ready = threading.Condition()
1098 ready = threading.Condition()
@@ -21,10 +21,6 b' Config::'
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # API token. Get it from https://$HOST/conduit/login/
25 # Deprecated: see [phabricator.auth] below
26 #token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
27
28 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
29 # callsign is "FOO".
25 # callsign is "FOO".
30 callsign = FOO
26 callsign = FOO
@@ -35,10 +31,12 b' Config::'
35 # the internal library.
31 # the internal library.
36 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
37
33
38 [phabricator.auth]
34 [auth]
39 example.url = https://phab.example.com/
35 example.schemes = https
36 example.prefix = phab.example.com
37
40 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
41 example.token = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
42 """
40 """
43
41
44 from __future__ import absolute_import
42 from __future__ import absolute_import
@@ -55,6 +53,7 b' from mercurial import ('
55 context,
53 context,
56 encoding,
54 encoding,
57 error,
55 error,
56 httpconnection as httpconnectionmod,
58 mdiff,
57 mdiff,
59 obsutil,
58 obsutil,
60 parser,
59 parser,
@@ -74,13 +73,37 b' from mercurial.utils import ('
74 cmdtable = {}
73 cmdtable = {}
75 command = registrar.command(cmdtable)
74 command = registrar.command(cmdtable)
76
75
76 configtable = {}
77 configitem = registrar.configitem(configtable)
78
79 # developer config: phabricator.batchsize
80 configitem(b'phabricator', b'batchsize',
81 default=12,
82 )
83 configitem(b'phabricator', b'callsign',
84 default=None,
85 )
86 configitem(b'phabricator', b'curlcmd',
87 default=None,
88 )
89 # developer config: phabricator.repophid
90 configitem(b'phabricator', b'repophid',
91 default=None,
92 )
93 configitem(b'phabricator', b'url',
94 default=None,
95 )
96 configitem(b'phabsend', b'confirm',
97 default=False,
98 )
99
77 colortable = {
100 colortable = {
78 'phabricator.action.created': 'green',
101 b'phabricator.action.created': b'green',
79 'phabricator.action.skipped': 'magenta',
102 b'phabricator.action.skipped': b'magenta',
80 'phabricator.action.updated': 'magenta',
103 b'phabricator.action.updated': b'magenta',
81 'phabricator.desc': '',
104 b'phabricator.desc': b'',
82 'phabricator.drev': 'bold',
105 b'phabricator.drev': b'bold',
83 'phabricator.node': '',
106 b'phabricator.node': b'',
84 }
107 }
85
108
86 def urlencodenested(params):
109 def urlencodenested(params):
@@ -98,59 +121,69 b' def urlencodenested(params):'
98 else:
121 else:
99 for k, v in items(obj):
122 for k, v in items(obj):
100 if prefix:
123 if prefix:
101 process('%s[%s]' % (prefix, k), v)
124 process(b'%s[%s]' % (prefix, k), v)
102 else:
125 else:
103 process(k, v)
126 process(k, v)
104 process('', params)
127 process(b'', params)
105 return util.urlreq.urlencode(flatparams)
128 return util.urlreq.urlencode(flatparams)
106
129
107 printed_token_warning = False
130 printed_token_warning = False
108
131
109 def readlegacytoken(repo):
132 def readlegacytoken(repo, url):
110 """Transitional support for old phabricator tokens.
133 """Transitional support for old phabricator tokens.
111
134
112 Remove before the 4.6 release.
135 Remove before the 4.7 release.
113 """
136 """
137 groups = {}
138 for key, val in repo.ui.configitems(b'phabricator.auth'):
139 if b'.' not in key:
140 repo.ui.warn(_(b"ignoring invalid [phabricator.auth] key '%s'\n")
141 % key)
142 continue
143 group, setting = key.rsplit(b'.', 1)
144 groups.setdefault(group, {})[setting] = val
145
146 token = None
147 for group, auth in groups.iteritems():
148 if url != auth.get(b'url'):
149 continue
150 token = auth.get(b'token')
151 if token:
152 break
153
114 global printed_token_warning
154 global printed_token_warning
115 token = repo.ui.config('phabricator', 'token')
155
116 if token and not printed_token_warning:
156 if token and not printed_token_warning:
117 printed_token_warning = True
157 printed_token_warning = True
118 repo.ui.warn(_('phabricator.token is deprecated - please '
158 repo.ui.warn(_(b'phabricator.auth.token is deprecated - please '
119 'migrate to the phabricator.auth section.\n'))
159 b'migrate to auth.phabtoken.\n'))
120 return token
160 return token
121
161
122 def readurltoken(repo):
162 def readurltoken(repo):
123 """return conduit url, token and make sure they exist
163 """return conduit url, token and make sure they exist
124
164
125 Currently read from [phabricator] config section. In the future, it might
165 Currently read from [auth] config section. In the future, it might
126 make sense to read from .arcconfig and .arcrc as well.
166 make sense to read from .arcconfig and .arcrc as well.
127 """
167 """
128 url = repo.ui.config('phabricator', 'url')
168 url = repo.ui.config(b'phabricator', b'url')
129 if not url:
169 if not url:
130 raise error.Abort(_('config %s.%s is required')
170 raise error.Abort(_(b'config %s.%s is required')
131 % ('phabricator', 'url'))
171 % (b'phabricator', b'url'))
132
172
133 groups = {}
173 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user)
134 for key, val in repo.ui.configitems('phabricator.auth'):
174 token = None
135 if '.' not in key:
136 repo.ui.warn(_("ignoring invalid [phabricator.auth] key '%s'\n")
137 % key)
138 continue
139 group, setting = key.rsplit('.', 1)
140 groups.setdefault(group, {})[setting] = val
141
175
142 token = None
176 if res:
143 for group, auth in groups.iteritems():
177 group, auth = res
144 if url != auth.get('url'):
178
145 continue
179 repo.ui.debug(b"using auth.%s.* for authentication\n" % group)
146 token = auth.get('token')
180
147 if token:
181 token = auth.get(b'phabtoken')
148 break
149
182
150 if not token:
183 if not token:
151 token = readlegacytoken(repo)
184 token = readlegacytoken(repo, url)
152 if not token:
185 if not token:
153 raise error.Abort(_('Can\'t find conduit token associated to %s')
186 raise error.Abort(_(b'Can\'t find conduit token associated to %s')
154 % (url,))
187 % (url,))
155
188
156 return url, token
189 return url, token
@@ -158,14 +191,14 b' def readurltoken(repo):'
158 def callconduit(repo, name, params):
191 def callconduit(repo, name, params):
159 """call Conduit API, params is a dict. return json.loads result, or None"""
192 """call Conduit API, params is a dict. return json.loads result, or None"""
160 host, token = readurltoken(repo)
193 host, token = readurltoken(repo)
161 url, authinfo = util.url('/'.join([host, 'api', name])).authinfo()
194 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
162 repo.ui.debug('Conduit Call: %s %s\n' % (url, params))
195 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
163 params = params.copy()
196 params = params.copy()
164 params['api.token'] = token
197 params[b'api.token'] = token
165 data = urlencodenested(params)
198 data = urlencodenested(params)
166 curlcmd = repo.ui.config('phabricator', 'curlcmd')
199 curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
167 if curlcmd:
200 if curlcmd:
168 sin, sout = procutil.popen2('%s -d @- %s'
201 sin, sout = procutil.popen2(b'%s -d @- %s'
169 % (curlcmd, procutil.shellquote(url)))
202 % (curlcmd, procutil.shellquote(url)))
170 sin.write(data)
203 sin.write(data)
171 sin.close()
204 sin.close()
@@ -174,15 +207,15 b' def callconduit(repo, name, params):'
174 urlopener = urlmod.opener(repo.ui, authinfo)
207 urlopener = urlmod.opener(repo.ui, authinfo)
175 request = util.urlreq.request(url, data=data)
208 request = util.urlreq.request(url, data=data)
176 body = urlopener.open(request).read()
209 body = urlopener.open(request).read()
177 repo.ui.debug('Conduit Response: %s\n' % body)
210 repo.ui.debug(b'Conduit Response: %s\n' % body)
178 parsed = json.loads(body)
211 parsed = json.loads(body)
179 if parsed.get(r'error_code'):
212 if parsed.get(r'error_code'):
180 msg = (_('Conduit Error (%s): %s')
213 msg = (_(b'Conduit Error (%s): %s')
181 % (parsed[r'error_code'], parsed[r'error_info']))
214 % (parsed[r'error_code'], parsed[r'error_info']))
182 raise error.Abort(msg)
215 raise error.Abort(msg)
183 return parsed[r'result']
216 return parsed[r'result']
184
217
185 @command('debugcallconduit', [], _('METHOD'))
218 @command(b'debugcallconduit', [], _(b'METHOD'))
186 def debugcallconduit(ui, repo, name):
219 def debugcallconduit(ui, repo, name):
187 """call Conduit API
220 """call Conduit API
188
221
@@ -191,29 +224,29 b' def debugcallconduit(ui, repo, name):'
191 """
224 """
192 params = json.loads(ui.fin.read())
225 params = json.loads(ui.fin.read())
193 result = callconduit(repo, name, params)
226 result = callconduit(repo, name, params)
194 s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
227 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': '))
195 ui.write('%s\n' % s)
228 ui.write(b'%s\n' % s)
196
229
197 def getrepophid(repo):
230 def getrepophid(repo):
198 """given callsign, return repository PHID or None"""
231 """given callsign, return repository PHID or None"""
199 # developer config: phabricator.repophid
232 # developer config: phabricator.repophid
200 repophid = repo.ui.config('phabricator', 'repophid')
233 repophid = repo.ui.config(b'phabricator', b'repophid')
201 if repophid:
234 if repophid:
202 return repophid
235 return repophid
203 callsign = repo.ui.config('phabricator', 'callsign')
236 callsign = repo.ui.config(b'phabricator', b'callsign')
204 if not callsign:
237 if not callsign:
205 return None
238 return None
206 query = callconduit(repo, 'diffusion.repository.search',
239 query = callconduit(repo, b'diffusion.repository.search',
207 {'constraints': {'callsigns': [callsign]}})
240 {b'constraints': {b'callsigns': [callsign]}})
208 if len(query[r'data']) == 0:
241 if len(query[r'data']) == 0:
209 return None
242 return None
210 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
243 repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
211 repo.ui.setconfig('phabricator', 'repophid', repophid)
244 repo.ui.setconfig(b'phabricator', b'repophid', repophid)
212 return repophid
245 return repophid
213
246
214 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z')
247 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z')
215 _differentialrevisiondescre = re.compile(
248 _differentialrevisiondescre = re.compile(
216 '^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
249 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
217
250
218 def getoldnodedrevmap(repo, nodelist):
251 def getoldnodedrevmap(repo, nodelist):
219 """find previous nodes that has been sent to Phabricator
252 """find previous nodes that has been sent to Phabricator
@@ -254,16 +287,16 b' def getoldnodedrevmap(repo, nodelist):'
254 # Check commit message
287 # Check commit message
255 m = _differentialrevisiondescre.search(ctx.description())
288 m = _differentialrevisiondescre.search(ctx.description())
256 if m:
289 if m:
257 toconfirm[node] = (1, set(precnodes), int(m.group('id')))
290 toconfirm[node] = (1, set(precnodes), int(m.group(b'id')))
258
291
259 # Double check if tags are genuine by collecting all old nodes from
292 # Double check if tags are genuine by collecting all old nodes from
260 # Phabricator, and expect precursors overlap with it.
293 # Phabricator, and expect precursors overlap with it.
261 if toconfirm:
294 if toconfirm:
262 drevs = [drev for force, precs, drev in toconfirm.values()]
295 drevs = [drev for force, precs, drev in toconfirm.values()]
263 alldiffs = callconduit(unfi, 'differential.querydiffs',
296 alldiffs = callconduit(unfi, b'differential.querydiffs',
264 {'revisionIDs': drevs})
297 {b'revisionIDs': drevs})
265 getnode = lambda d: bin(encoding.unitolocal(
298 getnode = lambda d: bin(encoding.unitolocal(
266 getdiffmeta(d).get(r'node', ''))) or None
299 getdiffmeta(d).get(r'node', b''))) or None
267 for newnode, (force, precset, drev) in toconfirm.items():
300 for newnode, (force, precset, drev) in toconfirm.items():
268 diffs = [d for d in alldiffs.values()
301 diffs = [d for d in alldiffs.values()
269 if int(d[r'revisionID']) == drev]
302 if int(d[r'revisionID']) == drev]
@@ -274,11 +307,11 b' def getoldnodedrevmap(repo, nodelist):'
274 # Ignore if precursors (Phabricator and local repo) do not overlap,
307 # Ignore if precursors (Phabricator and local repo) do not overlap,
275 # and force is not set (when commit message says nothing)
308 # and force is not set (when commit message says nothing)
276 if not force and not bool(phprecset & precset):
309 if not force and not bool(phprecset & precset):
277 tagname = 'D%d' % drev
310 tagname = b'D%d' % drev
278 tags.tag(repo, tagname, nullid, message=None, user=None,
311 tags.tag(repo, tagname, nullid, message=None, user=None,
279 date=None, local=True)
312 date=None, local=True)
280 unfi.ui.warn(_('D%s: local tag removed - does not match '
313 unfi.ui.warn(_(b'D%s: local tag removed - does not match '
281 'Differential history\n') % drev)
314 b'Differential history\n') % drev)
282 continue
315 continue
283
316
284 # Find the last node using Phabricator metadata, and make sure it
317 # Find the last node using Phabricator metadata, and make sure it
@@ -307,40 +340,40 b' def creatediff(ctx):'
307 repo = ctx.repo()
340 repo = ctx.repo()
308 repophid = getrepophid(repo)
341 repophid = getrepophid(repo)
309 # Create a "Differential Diff" via "differential.createrawdiff" API
342 # Create a "Differential Diff" via "differential.createrawdiff" API
310 params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
343 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
311 if repophid:
344 if repophid:
312 params['repositoryPHID'] = repophid
345 params[b'repositoryPHID'] = repophid
313 diff = callconduit(repo, 'differential.createrawdiff', params)
346 diff = callconduit(repo, b'differential.createrawdiff', params)
314 if not diff:
347 if not diff:
315 raise error.Abort(_('cannot create diff for %s') % ctx)
348 raise error.Abort(_(b'cannot create diff for %s') % ctx)
316 return diff
349 return diff
317
350
318 def writediffproperties(ctx, diff):
351 def writediffproperties(ctx, diff):
319 """write metadata to diff so patches could be applied losslessly"""
352 """write metadata to diff so patches could be applied losslessly"""
320 params = {
353 params = {
321 'diff_id': diff[r'id'],
354 b'diff_id': diff[r'id'],
322 'name': 'hg:meta',
355 b'name': b'hg:meta',
323 'data': json.dumps({
356 b'data': json.dumps({
324 'user': ctx.user(),
357 b'user': ctx.user(),
325 'date': '%d %d' % ctx.date(),
358 b'date': b'%d %d' % ctx.date(),
326 'node': ctx.hex(),
359 b'node': ctx.hex(),
327 'parent': ctx.p1().hex(),
360 b'parent': ctx.p1().hex(),
328 }),
361 }),
329 }
362 }
330 callconduit(ctx.repo(), 'differential.setdiffproperty', params)
363 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
331
364
332 params = {
365 params = {
333 'diff_id': diff[r'id'],
366 b'diff_id': diff[r'id'],
334 'name': 'local:commits',
367 b'name': b'local:commits',
335 'data': json.dumps({
368 b'data': json.dumps({
336 ctx.hex(): {
369 ctx.hex(): {
337 'author': stringutil.person(ctx.user()),
370 b'author': stringutil.person(ctx.user()),
338 'authorEmail': stringutil.email(ctx.user()),
371 b'authorEmail': stringutil.email(ctx.user()),
339 'time': ctx.date()[0],
372 b'time': ctx.date()[0],
340 },
373 },
341 }),
374 }),
342 }
375 }
343 callconduit(ctx.repo(), 'differential.setdiffproperty', params)
376 callconduit(ctx.repo(), b'differential.setdiffproperty', params)
344
377
345 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
378 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
346 olddiff=None, actions=None):
379 olddiff=None, actions=None):
@@ -365,7 +398,7 b' def createdifferentialrevision(ctx, revi'
365 transactions = []
398 transactions = []
366 if neednewdiff:
399 if neednewdiff:
367 diff = creatediff(ctx)
400 diff = creatediff(ctx)
368 transactions.append({'type': 'update', 'value': diff[r'phid']})
401 transactions.append({b'type': b'update', b'value': diff[r'phid']})
369 else:
402 else:
370 # Even if we don't need to upload a new diff because the patch content
403 # Even if we don't need to upload a new diff because the patch content
371 # does not change. We might still need to update its metadata so
404 # does not change. We might still need to update its metadata so
@@ -379,52 +412,52 b' def createdifferentialrevision(ctx, revi'
379 # existing revision (revid is not None) since that introduces visible
412 # existing revision (revid is not None) since that introduces visible
380 # churns (someone edited "Summary" twice) on the web page.
413 # churns (someone edited "Summary" twice) on the web page.
381 if parentrevid and revid is None:
414 if parentrevid and revid is None:
382 summary = 'Depends on D%s' % parentrevid
415 summary = b'Depends on D%s' % parentrevid
383 transactions += [{'type': 'summary', 'value': summary},
416 transactions += [{b'type': b'summary', b'value': summary},
384 {'type': 'summary', 'value': ' '}]
417 {b'type': b'summary', b'value': b' '}]
385
418
386 if actions:
419 if actions:
387 transactions += actions
420 transactions += actions
388
421
389 # Parse commit message and update related fields.
422 # Parse commit message and update related fields.
390 desc = ctx.description()
423 desc = ctx.description()
391 info = callconduit(repo, 'differential.parsecommitmessage',
424 info = callconduit(repo, b'differential.parsecommitmessage',
392 {'corpus': desc})
425 {b'corpus': desc})
393 for k, v in info[r'fields'].items():
426 for k, v in info[r'fields'].items():
394 if k in ['title', 'summary', 'testPlan']:
427 if k in [b'title', b'summary', b'testPlan']:
395 transactions.append({'type': k, 'value': v})
428 transactions.append({b'type': k, b'value': v})
396
429
397 params = {'transactions': transactions}
430 params = {b'transactions': transactions}
398 if revid is not None:
431 if revid is not None:
399 # Update an existing Differential Revision
432 # Update an existing Differential Revision
400 params['objectIdentifier'] = revid
433 params[b'objectIdentifier'] = revid
401
434
402 revision = callconduit(repo, 'differential.revision.edit', params)
435 revision = callconduit(repo, b'differential.revision.edit', params)
403 if not revision:
436 if not revision:
404 raise error.Abort(_('cannot create revision for %s') % ctx)
437 raise error.Abort(_(b'cannot create revision for %s') % ctx)
405
438
406 return revision, diff
439 return revision, diff
407
440
408 def userphids(repo, names):
441 def userphids(repo, names):
409 """convert user names to PHIDs"""
442 """convert user names to PHIDs"""
410 query = {'constraints': {'usernames': names}}
443 query = {b'constraints': {b'usernames': names}}
411 result = callconduit(repo, 'user.search', query)
444 result = callconduit(repo, b'user.search', query)
412 # username not found is not an error of the API. So check if we have missed
445 # username not found is not an error of the API. So check if we have missed
413 # some names here.
446 # some names here.
414 data = result[r'data']
447 data = result[r'data']
415 resolved = set(entry[r'fields'][r'username'] for entry in data)
448 resolved = set(entry[r'fields'][r'username'] for entry in data)
416 unresolved = set(names) - resolved
449 unresolved = set(names) - resolved
417 if unresolved:
450 if unresolved:
418 raise error.Abort(_('unknown username: %s')
451 raise error.Abort(_(b'unknown username: %s')
419 % ' '.join(sorted(unresolved)))
452 % b' '.join(sorted(unresolved)))
420 return [entry[r'phid'] for entry in data]
453 return [entry[r'phid'] for entry in data]
421
454
422 @command('phabsend',
455 @command(b'phabsend',
423 [('r', 'rev', [], _('revisions to send'), _('REV')),
456 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
424 ('', 'amend', True, _('update commit messages')),
457 (b'', b'amend', True, _(b'update commit messages')),
425 ('', 'reviewer', [], _('specify reviewers')),
458 (b'', b'reviewer', [], _(b'specify reviewers')),
426 ('', 'confirm', None, _('ask for confirmation before sending'))],
459 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
427 _('REV [OPTIONS]'))
460 _(b'REV [OPTIONS]'))
428 def phabsend(ui, repo, *revs, **opts):
461 def phabsend(ui, repo, *revs, **opts):
429 """upload changesets to Phabricator
462 """upload changesets to Phabricator
430
463
@@ -452,29 +485,29 b' def phabsend(ui, repo, *revs, **opts):'
452 phabsend will check obsstore and the above association to decide whether to
485 phabsend will check obsstore and the above association to decide whether to
453 update an existing Differential Revision, or create a new one.
486 update an existing Differential Revision, or create a new one.
454 """
487 """
455 revs = list(revs) + opts.get('rev', [])
488 revs = list(revs) + opts.get(b'rev', [])
456 revs = scmutil.revrange(repo, revs)
489 revs = scmutil.revrange(repo, revs)
457
490
458 if not revs:
491 if not revs:
459 raise error.Abort(_('phabsend requires at least one changeset'))
492 raise error.Abort(_(b'phabsend requires at least one changeset'))
460 if opts.get('amend'):
493 if opts.get(b'amend'):
461 cmdutil.checkunfinished(repo)
494 cmdutil.checkunfinished(repo)
462
495
463 # {newnode: (oldnode, olddiff, olddrev}
496 # {newnode: (oldnode, olddiff, olddrev}
464 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
497 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
465
498
466 confirm = ui.configbool('phabsend', 'confirm')
499 confirm = ui.configbool(b'phabsend', b'confirm')
467 confirm |= bool(opts.get('confirm'))
500 confirm |= bool(opts.get(b'confirm'))
468 if confirm:
501 if confirm:
469 confirmed = _confirmbeforesend(repo, revs, oldmap)
502 confirmed = _confirmbeforesend(repo, revs, oldmap)
470 if not confirmed:
503 if not confirmed:
471 raise error.Abort(_('phabsend cancelled'))
504 raise error.Abort(_(b'phabsend cancelled'))
472
505
473 actions = []
506 actions = []
474 reviewers = opts.get('reviewer', [])
507 reviewers = opts.get(b'reviewer', [])
475 if reviewers:
508 if reviewers:
476 phids = userphids(repo, reviewers)
509 phids = userphids(repo, reviewers)
477 actions.append({'type': 'reviewers.add', 'value': phids})
510 actions.append({b'type': b'reviewers.add', b'value': phids})
478
511
479 drevids = [] # [int]
512 drevids = [] # [int]
480 diffmap = {} # {newnode: diff}
513 diffmap = {} # {newnode: diff}
@@ -483,54 +516,54 b' def phabsend(ui, repo, *revs, **opts):'
483 # can provide dependency relationship
516 # can provide dependency relationship
484 lastrevid = None
517 lastrevid = None
485 for rev in revs:
518 for rev in revs:
486 ui.debug('sending rev %d\n' % rev)
519 ui.debug(b'sending rev %d\n' % rev)
487 ctx = repo[rev]
520 ctx = repo[rev]
488
521
489 # Get Differential Revision ID
522 # Get Differential Revision ID
490 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
523 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
491 if oldnode != ctx.node() or opts.get('amend'):
524 if oldnode != ctx.node() or opts.get(b'amend'):
492 # Create or update Differential Revision
525 # Create or update Differential Revision
493 revision, diff = createdifferentialrevision(
526 revision, diff = createdifferentialrevision(
494 ctx, revid, lastrevid, oldnode, olddiff, actions)
527 ctx, revid, lastrevid, oldnode, olddiff, actions)
495 diffmap[ctx.node()] = diff
528 diffmap[ctx.node()] = diff
496 newrevid = int(revision[r'object'][r'id'])
529 newrevid = int(revision[r'object'][r'id'])
497 if revid:
530 if revid:
498 action = 'updated'
531 action = b'updated'
499 else:
532 else:
500 action = 'created'
533 action = b'created'
501
534
502 # Create a local tag to note the association, if commit message
535 # Create a local tag to note the association, if commit message
503 # does not have it already
536 # does not have it already
504 m = _differentialrevisiondescre.search(ctx.description())
537 m = _differentialrevisiondescre.search(ctx.description())
505 if not m or int(m.group('id')) != newrevid:
538 if not m or int(m.group(b'id')) != newrevid:
506 tagname = 'D%d' % newrevid
539 tagname = b'D%d' % newrevid
507 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
540 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
508 date=None, local=True)
541 date=None, local=True)
509 else:
542 else:
510 # Nothing changed. But still set "newrevid" so the next revision
543 # Nothing changed. But still set "newrevid" so the next revision
511 # could depend on this one.
544 # could depend on this one.
512 newrevid = revid
545 newrevid = revid
513 action = 'skipped'
546 action = b'skipped'
514
547
515 actiondesc = ui.label(
548 actiondesc = ui.label(
516 {'created': _('created'),
549 {b'created': _(b'created'),
517 'skipped': _('skipped'),
550 b'skipped': _(b'skipped'),
518 'updated': _('updated')}[action],
551 b'updated': _(b'updated')}[action],
519 'phabricator.action.%s' % action)
552 b'phabricator.action.%s' % action)
520 drevdesc = ui.label('D%s' % newrevid, 'phabricator.drev')
553 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
521 nodedesc = ui.label(bytes(ctx), 'phabricator.node')
554 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
522 desc = ui.label(ctx.description().split('\n')[0], 'phabricator.desc')
555 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
523 ui.write(_('%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
556 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
524 desc))
557 desc))
525 drevids.append(newrevid)
558 drevids.append(newrevid)
526 lastrevid = newrevid
559 lastrevid = newrevid
527
560
528 # Update commit messages and remove tags
561 # Update commit messages and remove tags
529 if opts.get('amend'):
562 if opts.get(b'amend'):
530 unfi = repo.unfiltered()
563 unfi = repo.unfiltered()
531 drevs = callconduit(repo, 'differential.query', {'ids': drevids})
564 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
532 with repo.wlock(), repo.lock(), repo.transaction('phabsend'):
565 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
533 wnode = unfi['.'].node()
566 wnode = unfi[b'.'].node()
534 mapping = {} # {oldnode: [newnode]}
567 mapping = {} # {oldnode: [newnode]}
535 for i, rev in enumerate(revs):
568 for i, rev in enumerate(revs):
536 old = unfi[rev]
569 old = unfi[rev]
@@ -546,23 +579,25 b' def phabsend(ui, repo, *revs, **opts):'
546 new = context.metadataonlyctx(
579 new = context.metadataonlyctx(
547 repo, old, parents=parents, text=newdesc,
580 repo, old, parents=parents, text=newdesc,
548 user=old.user(), date=old.date(), extra=old.extra())
581 user=old.user(), date=old.date(), extra=old.extra())
582
549 newnode = new.commit()
583 newnode = new.commit()
584
550 mapping[old.node()] = [newnode]
585 mapping[old.node()] = [newnode]
551 # Update diff property
586 # Update diff property
552 writediffproperties(unfi[newnode], diffmap[old.node()])
587 writediffproperties(unfi[newnode], diffmap[old.node()])
553 # Remove local tags since it's no longer necessary
588 # Remove local tags since it's no longer necessary
554 tagname = 'D%d' % drevid
589 tagname = b'D%d' % drevid
555 if tagname in repo.tags():
590 if tagname in repo.tags():
556 tags.tag(repo, tagname, nullid, message=None, user=None,
591 tags.tag(repo, tagname, nullid, message=None, user=None,
557 date=None, local=True)
592 date=None, local=True)
558 scmutil.cleanupnodes(repo, mapping, 'phabsend')
593 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
559 if wnode in mapping:
594 if wnode in mapping:
560 unfi.setparents(mapping[wnode][0])
595 unfi.setparents(mapping[wnode][0])
561
596
562 # Map from "hg:meta" keys to header understood by "hg import". The order is
597 # Map from "hg:meta" keys to header understood by "hg import". The order is
563 # consistent with "hg export" output.
598 # consistent with "hg export" output.
564 _metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'),
599 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
565 (r'node', 'Node ID'), (r'parent', 'Parent ')])
600 (r'node', b'Node ID'), (r'parent', b'Parent ')])
566
601
567 def _confirmbeforesend(repo, revs, oldmap):
602 def _confirmbeforesend(repo, revs, oldmap):
568 url, token = readurltoken(repo)
603 url, token = readurltoken(repo)
@@ -572,68 +607,69 b' def _confirmbeforesend(repo, revs, oldma'
572 desc = ctx.description().splitlines()[0]
607 desc = ctx.description().splitlines()[0]
573 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
608 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
574 if drevid:
609 if drevid:
575 drevdesc = ui.label('D%s' % drevid, 'phabricator.drev')
610 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
576 else:
611 else:
577 drevdesc = ui.label(_('NEW'), 'phabricator.drev')
612 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
578
613
579 ui.write(_('%s - %s: %s\n') % (drevdesc,
614 ui.write(_(b'%s - %s: %s\n')
580 ui.label(bytes(ctx), 'phabricator.node'),
615 % (drevdesc,
581 ui.label(desc, 'phabricator.desc')))
616 ui.label(bytes(ctx), b'phabricator.node'),
617 ui.label(desc, b'phabricator.desc')))
582
618
583 if ui.promptchoice(_('Send the above changes to %s (yn)?'
619 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
584 '$$ &Yes $$ &No') % url):
620 b'$$ &Yes $$ &No') % url):
585 return False
621 return False
586
622
587 return True
623 return True
588
624
589 _knownstatusnames = {'accepted', 'needsreview', 'needsrevision', 'closed',
625 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
590 'abandoned'}
626 b'abandoned'}
591
627
592 def _getstatusname(drev):
628 def _getstatusname(drev):
593 """get normalized status name from a Differential Revision"""
629 """get normalized status name from a Differential Revision"""
594 return drev[r'statusName'].replace(' ', '').lower()
630 return drev[r'statusName'].replace(b' ', b'').lower()
595
631
596 # Small language to specify differential revisions. Support symbols: (), :X,
632 # Small language to specify differential revisions. Support symbols: (), :X,
597 # +, and -.
633 # +, and -.
598
634
599 _elements = {
635 _elements = {
600 # token-type: binding-strength, primary, prefix, infix, suffix
636 # token-type: binding-strength, primary, prefix, infix, suffix
601 '(': (12, None, ('group', 1, ')'), None, None),
637 b'(': (12, None, (b'group', 1, b')'), None, None),
602 ':': (8, None, ('ancestors', 8), None, None),
638 b':': (8, None, (b'ancestors', 8), None, None),
603 '&': (5, None, None, ('and_', 5), None),
639 b'&': (5, None, None, (b'and_', 5), None),
604 '+': (4, None, None, ('add', 4), None),
640 b'+': (4, None, None, (b'add', 4), None),
605 '-': (4, None, None, ('sub', 4), None),
641 b'-': (4, None, None, (b'sub', 4), None),
606 ')': (0, None, None, None, None),
642 b')': (0, None, None, None, None),
607 'symbol': (0, 'symbol', None, None, None),
643 b'symbol': (0, b'symbol', None, None, None),
608 'end': (0, None, None, None, None),
644 b'end': (0, None, None, None, None),
609 }
645 }
610
646
611 def _tokenize(text):
647 def _tokenize(text):
612 view = memoryview(text) # zero-copy slice
648 view = memoryview(text) # zero-copy slice
613 special = '():+-& '
649 special = b'():+-& '
614 pos = 0
650 pos = 0
615 length = len(text)
651 length = len(text)
616 while pos < length:
652 while pos < length:
617 symbol = ''.join(itertools.takewhile(lambda ch: ch not in special,
653 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
618 view[pos:]))
654 view[pos:]))
619 if symbol:
655 if symbol:
620 yield ('symbol', symbol, pos)
656 yield (b'symbol', symbol, pos)
621 pos += len(symbol)
657 pos += len(symbol)
622 else: # special char, ignore space
658 else: # special char, ignore space
623 if text[pos] != ' ':
659 if text[pos] != b' ':
624 yield (text[pos], None, pos)
660 yield (text[pos], None, pos)
625 pos += 1
661 pos += 1
626 yield ('end', None, pos)
662 yield (b'end', None, pos)
627
663
628 def _parse(text):
664 def _parse(text):
629 tree, pos = parser.parser(_elements).parse(_tokenize(text))
665 tree, pos = parser.parser(_elements).parse(_tokenize(text))
630 if pos != len(text):
666 if pos != len(text):
631 raise error.ParseError('invalid token', pos)
667 raise error.ParseError(b'invalid token', pos)
632 return tree
668 return tree
633
669
634 def _parsedrev(symbol):
670 def _parsedrev(symbol):
635 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
671 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
636 if symbol.startswith('D') and symbol[1:].isdigit():
672 if symbol.startswith(b'D') and symbol[1:].isdigit():
637 return int(symbol[1:])
673 return int(symbol[1:])
638 if symbol.isdigit():
674 if symbol.isdigit():
639 return int(symbol)
675 return int(symbol)
@@ -643,11 +679,11 b' def _prefetchdrevs(tree):'
643 drevs = set()
679 drevs = set()
644 ancestordrevs = set()
680 ancestordrevs = set()
645 op = tree[0]
681 op = tree[0]
646 if op == 'symbol':
682 if op == b'symbol':
647 r = _parsedrev(tree[1])
683 r = _parsedrev(tree[1])
648 if r:
684 if r:
649 drevs.add(r)
685 drevs.add(r)
650 elif op == 'ancestors':
686 elif op == b'ancestors':
651 r, a = _prefetchdrevs(tree[1])
687 r, a = _prefetchdrevs(tree[1])
652 drevs.update(r)
688 drevs.update(r)
653 ancestordrevs.update(r)
689 ancestordrevs.update(r)
@@ -706,13 +742,14 b' def querydrev(repo, spec):'
706 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
742 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
707 if key in prefetched:
743 if key in prefetched:
708 return prefetched[key]
744 return prefetched[key]
709 drevs = callconduit(repo, 'differential.query', params)
745 drevs = callconduit(repo, b'differential.query', params)
710 # Fill prefetched with the result
746 # Fill prefetched with the result
711 for drev in drevs:
747 for drev in drevs:
712 prefetched[drev[r'phid']] = drev
748 prefetched[drev[r'phid']] = drev
713 prefetched[int(drev[r'id'])] = drev
749 prefetched[int(drev[r'id'])] = drev
714 if key not in prefetched:
750 if key not in prefetched:
715 raise error.Abort(_('cannot get Differential Revision %r') % params)
751 raise error.Abort(_(b'cannot get Differential Revision %r')
752 % params)
716 return prefetched[key]
753 return prefetched[key]
717
754
718 def getstack(topdrevids):
755 def getstack(topdrevids):
@@ -730,7 +767,7 b' def querydrev(repo, spec):'
730 auxiliary = drev.get(r'auxiliary', {})
767 auxiliary = drev.get(r'auxiliary', {})
731 depends = auxiliary.get(r'phabricator:depends-on', [])
768 depends = auxiliary.get(r'phabricator:depends-on', [])
732 for phid in depends:
769 for phid in depends:
733 queue.append({'phids': [phid]})
770 queue.append({b'phids': [phid]})
734 result.reverse()
771 result.reverse()
735 return smartset.baseset(result)
772 return smartset.baseset(result)
736
773
@@ -741,7 +778,7 b' def querydrev(repo, spec):'
741 drevs, ancestordrevs = _prefetchdrevs(tree)
778 drevs, ancestordrevs = _prefetchdrevs(tree)
742
779
743 # developer config: phabricator.batchsize
780 # developer config: phabricator.batchsize
744 batchsize = repo.ui.configint('phabricator', 'batchsize', 12)
781 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
745
782
746 # Prefetch Differential Revisions in batch
783 # Prefetch Differential Revisions in batch
747 tofetch = set(drevs)
784 tofetch = set(drevs)
@@ -754,7 +791,7 b' def querydrev(repo, spec):'
754 # Walk through the tree, return smartsets
791 # Walk through the tree, return smartsets
755 def walk(tree):
792 def walk(tree):
756 op = tree[0]
793 op = tree[0]
757 if op == 'symbol':
794 if op == b'symbol':
758 drev = _parsedrev(tree[1])
795 drev = _parsedrev(tree[1])
759 if drev:
796 if drev:
760 return smartset.baseset([drev])
797 return smartset.baseset([drev])
@@ -763,16 +800,16 b' def querydrev(repo, spec):'
763 if _getstatusname(prefetched[r]) == tree[1]]
800 if _getstatusname(prefetched[r]) == tree[1]]
764 return smartset.baseset(drevs)
801 return smartset.baseset(drevs)
765 else:
802 else:
766 raise error.Abort(_('unknown symbol: %s') % tree[1])
803 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
767 elif op in {'and_', 'add', 'sub'}:
804 elif op in {b'and_', b'add', b'sub'}:
768 assert len(tree) == 3
805 assert len(tree) == 3
769 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
806 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
770 elif op == 'group':
807 elif op == b'group':
771 return walk(tree[1])
808 return walk(tree[1])
772 elif op == 'ancestors':
809 elif op == b'ancestors':
773 return getstack(walk(tree[1]))
810 return getstack(walk(tree[1]))
774 else:
811 else:
775 raise error.ProgrammingError('illegal tree: %r' % tree)
812 raise error.ProgrammingError(b'illegal tree: %r' % tree)
776
813
777 return [prefetched[r] for r in walk(tree)]
814 return [prefetched[r] for r in walk(tree)]
778
815
@@ -786,9 +823,9 b' def getdescfromdrev(drev):'
786 summary = drev[r'summary'].rstrip()
823 summary = drev[r'summary'].rstrip()
787 testplan = drev[r'testPlan'].rstrip()
824 testplan = drev[r'testPlan'].rstrip()
788 if testplan:
825 if testplan:
789 testplan = 'Test Plan:\n%s' % testplan
826 testplan = b'Test Plan:\n%s' % testplan
790 uri = 'Differential Revision: %s' % drev[r'uri']
827 uri = b'Differential Revision: %s' % drev[r'uri']
791 return '\n\n'.join(filter(None, [title, summary, testplan, uri]))
828 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
792
829
793 def getdiffmeta(diff):
830 def getdiffmeta(diff):
794 """get commit metadata (date, node, user, p1) from a diff object
831 """get commit metadata (date, node, user, p1) from a diff object
@@ -848,16 +885,17 b' def readpatch(repo, drevs, write):'
848 """
885 """
849 # Prefetch hg:meta property for all diffs
886 # Prefetch hg:meta property for all diffs
850 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
887 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
851 diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids})
888 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})
852
889
853 # Generate patch for each drev
890 # Generate patch for each drev
854 for drev in drevs:
891 for drev in drevs:
855 repo.ui.note(_('reading D%s\n') % drev[r'id'])
892 repo.ui.note(_(b'reading D%s\n') % drev[r'id'])
856
893
857 diffid = max(int(v) for v in drev[r'diffs'])
894 diffid = max(int(v) for v in drev[r'diffs'])
858 body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid})
895 body = callconduit(repo, b'differential.getrawdiff',
896 {b'diffID': diffid})
859 desc = getdescfromdrev(drev)
897 desc = getdescfromdrev(drev)
860 header = '# HG changeset patch\n'
898 header = b'# HG changeset patch\n'
861
899
862 # Try to preserve metadata from hg:meta property. Write hg patch
900 # Try to preserve metadata from hg:meta property. Write hg patch
863 # headers that can be read by the "import" command. See patchheadermap
901 # headers that can be read by the "import" command. See patchheadermap
@@ -865,14 +903,14 b' def readpatch(repo, drevs, write):'
865 meta = getdiffmeta(diffs[str(diffid)])
903 meta = getdiffmeta(diffs[str(diffid)])
866 for k in _metanamemap.keys():
904 for k in _metanamemap.keys():
867 if k in meta:
905 if k in meta:
868 header += '# %s %s\n' % (_metanamemap[k], meta[k])
906 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
869
907
870 content = '%s%s\n%s' % (header, desc, body)
908 content = b'%s%s\n%s' % (header, desc, body)
871 write(encoding.unitolocal(content))
909 write(encoding.unitolocal(content))
872
910
873 @command('phabread',
911 @command(b'phabread',
874 [('', 'stack', False, _('read dependencies'))],
912 [(b'', b'stack', False, _(b'read dependencies'))],
875 _('DREVSPEC [OPTIONS]'))
913 _(b'DREVSPEC [OPTIONS]'))
876 def phabread(ui, repo, spec, **opts):
914 def phabread(ui, repo, spec, **opts):
877 """print patches from Phabricator suitable for importing
915 """print patches from Phabricator suitable for importing
878
916
@@ -892,51 +930,51 b' def phabread(ui, repo, spec, **opts):'
892 If --stack is given, follow dependencies information and read all patches.
930 If --stack is given, follow dependencies information and read all patches.
893 It is equivalent to the ``:`` operator.
931 It is equivalent to the ``:`` operator.
894 """
932 """
895 if opts.get('stack'):
933 if opts.get(b'stack'):
896 spec = ':(%s)' % spec
934 spec = b':(%s)' % spec
897 drevs = querydrev(repo, spec)
935 drevs = querydrev(repo, spec)
898 readpatch(repo, drevs, ui.write)
936 readpatch(repo, drevs, ui.write)
899
937
900 @command('phabupdate',
938 @command(b'phabupdate',
901 [('', 'accept', False, _('accept revisions')),
939 [(b'', b'accept', False, _(b'accept revisions')),
902 ('', 'reject', False, _('reject revisions')),
940 (b'', b'reject', False, _(b'reject revisions')),
903 ('', 'abandon', False, _('abandon revisions')),
941 (b'', b'abandon', False, _(b'abandon revisions')),
904 ('', 'reclaim', False, _('reclaim revisions')),
942 (b'', b'reclaim', False, _(b'reclaim revisions')),
905 ('m', 'comment', '', _('comment on the last revision')),
943 (b'm', b'comment', b'', _(b'comment on the last revision')),
906 ], _('DREVSPEC [OPTIONS]'))
944 ], _(b'DREVSPEC [OPTIONS]'))
907 def phabupdate(ui, repo, spec, **opts):
945 def phabupdate(ui, repo, spec, **opts):
908 """update Differential Revision in batch
946 """update Differential Revision in batch
909
947
910 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
948 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
911 """
949 """
912 flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)]
950 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
913 if len(flags) > 1:
951 if len(flags) > 1:
914 raise error.Abort(_('%s cannot be used together') % ', '.join(flags))
952 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
915
953
916 actions = []
954 actions = []
917 for f in flags:
955 for f in flags:
918 actions.append({'type': f, 'value': 'true'})
956 actions.append({b'type': f, b'value': b'true'})
919
957
920 drevs = querydrev(repo, spec)
958 drevs = querydrev(repo, spec)
921 for i, drev in enumerate(drevs):
959 for i, drev in enumerate(drevs):
922 if i + 1 == len(drevs) and opts.get('comment'):
960 if i + 1 == len(drevs) and opts.get(b'comment'):
923 actions.append({'type': 'comment', 'value': opts['comment']})
961 actions.append({b'type': b'comment', b'value': opts[b'comment']})
924 if actions:
962 if actions:
925 params = {'objectIdentifier': drev[r'phid'],
963 params = {b'objectIdentifier': drev[r'phid'],
926 'transactions': actions}
964 b'transactions': actions}
927 callconduit(repo, 'differential.revision.edit', params)
965 callconduit(repo, b'differential.revision.edit', params)
928
966
929 templatekeyword = registrar.templatekeyword()
967 templatekeyword = registrar.templatekeyword()
930
968
931 @templatekeyword('phabreview', requires={'ctx'})
969 @templatekeyword(b'phabreview', requires={b'ctx'})
932 def template_review(context, mapping):
970 def template_review(context, mapping):
933 """:phabreview: Object describing the review for this changeset.
971 """:phabreview: Object describing the review for this changeset.
934 Has attributes `url` and `id`.
972 Has attributes `url` and `id`.
935 """
973 """
936 ctx = context.resource(mapping, 'ctx')
974 ctx = context.resource(mapping, b'ctx')
937 m = _differentialrevisiondescre.search(ctx.description())
975 m = _differentialrevisiondescre.search(ctx.description())
938 if m:
976 if m:
939 return {
977 return {
940 'url': m.group('url'),
978 b'url': m.group(b'url'),
941 'id': "D{}".format(m.group('id')),
979 b'id': b"D{}".format(m.group(b'id')),
942 }
980 }
@@ -2,6 +2,7 b' test-abort-checkin.t'
2 test-add.t
2 test-add.t
3 test-addremove-similar.t
3 test-addremove-similar.t
4 test-addremove.t
4 test-addremove.t
5 test-alias.t
5 test-amend-subrepo.t
6 test-amend-subrepo.t
6 test-amend.t
7 test-amend.t
7 test-ancestor.py
8 test-ancestor.py
@@ -14,6 +15,7 b' test-audit-subrepo.t'
14 test-automv.t
15 test-automv.t
15 test-backout.t
16 test-backout.t
16 test-backwards-remove.t
17 test-backwards-remove.t
18 test-bad-pull.t
17 test-basic.t
19 test-basic.t
18 test-bheads.t
20 test-bheads.t
19 test-bisect.t
21 test-bisect.t
@@ -22,6 +24,7 b' test-bisect3.t'
22 test-blackbox.t
24 test-blackbox.t
23 test-bookmarks-current.t
25 test-bookmarks-current.t
24 test-bookmarks-merge.t
26 test-bookmarks-merge.t
27 test-bookmarks-pushpull.t
25 test-bookmarks-rebase.t
28 test-bookmarks-rebase.t
26 test-bookmarks-strip.t
29 test-bookmarks-strip.t
27 test-bookmarks.t
30 test-bookmarks.t
@@ -30,16 +33,24 b' test-branch-option.t'
30 test-branch-tag-confict.t
33 test-branch-tag-confict.t
31 test-branches.t
34 test-branches.t
32 test-bundle-phases.t
35 test-bundle-phases.t
36 test-bundle-r.t
33 test-bundle-type.t
37 test-bundle-type.t
34 test-bundle-vs-outgoing.t
38 test-bundle-vs-outgoing.t
39 test-bundle.t
40 test-bundle2-exchange.t
41 test-bundle2-format.t
35 test-bundle2-multiple-changegroups.t
42 test-bundle2-multiple-changegroups.t
43 test-bundle2-pushback.t
44 test-bundle2-remote-changegroup.t
36 test-cappedreader.py
45 test-cappedreader.py
37 test-casecollision.t
46 test-casecollision.t
38 test-cat.t
47 test-cat.t
48 test-cbor.py
39 test-censor.t
49 test-censor.t
40 test-changelog-exec.t
50 test-changelog-exec.t
41 test-check-commit.t
51 test-check-commit.t
42 test-check-execute.t
52 test-check-execute.t
53 test-check-interfaces.py
43 test-check-module-imports.t
54 test-check-module-imports.t
44 test-check-pyflakes.t
55 test-check-pyflakes.t
45 test-check-pylint.t
56 test-check-pylint.t
@@ -49,7 +60,7 b' test-clone-cgi.t'
49 test-clone-pull-corruption.t
60 test-clone-pull-corruption.t
50 test-clone-r.t
61 test-clone-r.t
51 test-clone-update-order.t
62 test-clone-update-order.t
52 test-command-template.t
63 test-clonebundles.t
53 test-commit-amend.t
64 test-commit-amend.t
54 test-commit-interactive.t
65 test-commit-interactive.t
55 test-commit-multiple.t
66 test-commit-multiple.t
@@ -61,10 +72,16 b' test-config-env.py'
61 test-config.t
72 test-config.t
62 test-conflict.t
73 test-conflict.t
63 test-confused-revert.t
74 test-confused-revert.t
75 test-context.py
64 test-contrib-check-code.t
76 test-contrib-check-code.t
65 test-contrib-check-commit.t
77 test-contrib-check-commit.t
66 test-convert-authormap.t
78 test-convert-authormap.t
67 test-convert-clonebranches.t
79 test-convert-clonebranches.t
80 test-convert-cvs-branch.t
81 test-convert-cvs-detectmerge.t
82 test-convert-cvs-synthetic.t
83 test-convert-cvs.t
84 test-convert-cvsnt-mergepoints.t
68 test-convert-datesort.t
85 test-convert-datesort.t
69 test-convert-filemap.t
86 test-convert-filemap.t
70 test-convert-hg-sink.t
87 test-convert-hg-sink.t
@@ -81,6 +98,7 b' test-debugextensions.t'
81 test-debugindexdot.t
98 test-debugindexdot.t
82 test-debugrename.t
99 test-debugrename.t
83 test-default-push.t
100 test-default-push.t
101 test-diff-antipatience.t
84 test-diff-binary-file.t
102 test-diff-binary-file.t
85 test-diff-change.t
103 test-diff-change.t
86 test-diff-copy-depth.t
104 test-diff-copy-depth.t
@@ -99,6 +117,7 b' test-directaccess.t'
99 test-dirstate-backup.t
117 test-dirstate-backup.t
100 test-dirstate-nonnormalset.t
118 test-dirstate-nonnormalset.t
101 test-dirstate.t
119 test-dirstate.t
120 test-dispatch.py
102 test-doctest.py
121 test-doctest.py
103 test-double-merge.t
122 test-double-merge.t
104 test-drawdag.t
123 test-drawdag.t
@@ -114,8 +133,11 b' test-encoding.t'
114 test-eol-add.t
133 test-eol-add.t
115 test-eol-clone.t
134 test-eol-clone.t
116 test-eol-hook.t
135 test-eol-hook.t
136 test-eol-patch.t
117 test-eol-tag.t
137 test-eol-tag.t
118 test-eol-update.t
138 test-eol-update.t
139 test-eol.t
140 test-eolfilename.t
119 test-excessive-merge.t
141 test-excessive-merge.t
120 test-exchange-obsmarkers-case-A1.t
142 test-exchange-obsmarkers-case-A1.t
121 test-exchange-obsmarkers-case-A2.t
143 test-exchange-obsmarkers-case-A2.t
@@ -143,9 +165,15 b' test-execute-bit.t'
143 test-export.t
165 test-export.t
144 test-extdata.t
166 test-extdata.t
145 test-extdiff.t
167 test-extdiff.t
168 test-extensions-afterloaded.t
169 test-extensions-wrapfunction.py
146 test-extra-filelog-entry.t
170 test-extra-filelog-entry.t
171 test-fetch.t
147 test-filebranch.t
172 test-filebranch.t
173 test-filecache.py
174 test-filelog.py
148 test-fileset-generated.t
175 test-fileset-generated.t
176 test-fileset.t
149 test-fix-topology.t
177 test-fix-topology.t
150 test-flags.t
178 test-flags.t
151 test-generaldelta.t
179 test-generaldelta.t
@@ -158,10 +186,12 b' test-hg-parseurl.py'
158 test-hghave.t
186 test-hghave.t
159 test-hgignore.t
187 test-hgignore.t
160 test-hgk.t
188 test-hgk.t
189 test-hgrc.t
161 test-hgweb-bundle.t
190 test-hgweb-bundle.t
162 test-hgweb-descend-empties.t
191 test-hgweb-descend-empties.t
163 test-hgweb-empty.t
192 test-hgweb-empty.t
164 test-hgweb-removed.t
193 test-hgweb-removed.t
194 test-hgwebdir-paths.py
165 test-hgwebdirsym.t
195 test-hgwebdirsym.t
166 test-histedit-arguments.t
196 test-histedit-arguments.t
167 test-histedit-base.t
197 test-histedit-base.t
@@ -171,6 +201,7 b' test-histedit-drop.t'
171 test-histedit-edit.t
201 test-histedit-edit.t
172 test-histedit-fold-non-commute.t
202 test-histedit-fold-non-commute.t
173 test-histedit-fold.t
203 test-histedit-fold.t
204 test-histedit-no-backup.t
174 test-histedit-no-change.t
205 test-histedit-no-change.t
175 test-histedit-non-commute-abort.t
206 test-histedit-non-commute-abort.t
176 test-histedit-non-commute.t
207 test-histedit-non-commute.t
@@ -181,12 +212,18 b' test-http-branchmap.t'
181 test-http-bundle1.t
212 test-http-bundle1.t
182 test-http-clone-r.t
213 test-http-clone-r.t
183 test-http.t
214 test-http.t
215 test-hybridencode.py
184 test-identify.t
216 test-identify.t
217 test-impexp-branch.t
218 test-import-bypass.t
219 test-import-eol.t
220 test-import-merge.t
185 test-import-unknown.t
221 test-import-unknown.t
186 test-import.t
222 test-import.t
187 test-imports-checker.t
223 test-imports-checker.t
188 test-incoming-outgoing.t
224 test-incoming-outgoing.t
189 test-inherit-mode.t
225 test-inherit-mode.t
226 test-init.t
190 test-issue1089.t
227 test-issue1089.t
191 test-issue1102.t
228 test-issue1102.t
192 test-issue1175.t
229 test-issue1175.t
@@ -209,12 +246,14 b' test-issue842.t'
209 test-journal-exists.t
246 test-journal-exists.t
210 test-journal-share.t
247 test-journal-share.t
211 test-journal.t
248 test-journal.t
249 test-known.t
212 test-largefiles-cache.t
250 test-largefiles-cache.t
213 test-largefiles-misc.t
251 test-largefiles-misc.t
214 test-largefiles-small-disk.t
252 test-largefiles-small-disk.t
215 test-largefiles-update.t
253 test-largefiles-update.t
216 test-largefiles.t
254 test-largefiles.t
217 test-lfs-largefiles.t
255 test-lfs-largefiles.t
256 test-lfs-pointer.py
218 test-linerange.py
257 test-linerange.py
219 test-locate.t
258 test-locate.t
220 test-lock-badness.t
259 test-lock-badness.t
@@ -254,6 +293,8 b' test-merge6.t'
254 test-merge7.t
293 test-merge7.t
255 test-merge8.t
294 test-merge8.t
256 test-merge9.t
295 test-merge9.t
296 test-minifileset.py
297 test-minirst.py
257 test-mq-git.t
298 test-mq-git.t
258 test-mq-header-date.t
299 test-mq-header-date.t
259 test-mq-header-from.t
300 test-mq-header-from.t
@@ -298,8 +339,11 b' test-narrow-shallow-merges.t'
298 test-narrow-shallow.t
339 test-narrow-shallow.t
299 test-narrow-strip.t
340 test-narrow-strip.t
300 test-narrow-update.t
341 test-narrow-update.t
342 test-narrow-widen.t
343 test-narrow.t
301 test-nested-repo.t
344 test-nested-repo.t
302 test-newbranch.t
345 test-newbranch.t
346 test-nointerrupt.t
303 test-obshistory.t
347 test-obshistory.t
304 test-obsmarker-template.t
348 test-obsmarker-template.t
305 test-obsmarkers-effectflag.t
349 test-obsmarkers-effectflag.t
@@ -307,10 +351,16 b' test-obsolete-bundle-strip.t'
307 test-obsolete-changeset-exchange.t
351 test-obsolete-changeset-exchange.t
308 test-obsolete-checkheads.t
352 test-obsolete-checkheads.t
309 test-obsolete-distributed.t
353 test-obsolete-distributed.t
354 test-obsolete-divergent.t
310 test-obsolete-tag-cache.t
355 test-obsolete-tag-cache.t
356 test-pager.t
311 test-parents.t
357 test-parents.t
358 test-parseindex2.py
359 test-patch-offset.t
360 test-patch.t
312 test-pathconflicts-merge.t
361 test-pathconflicts-merge.t
313 test-pathconflicts-update.t
362 test-pathconflicts-update.t
363 test-pathencode.py
314 test-pending.t
364 test-pending.t
315 test-permissions.t
365 test-permissions.t
316 test-phases.t
366 test-phases.t
@@ -320,6 +370,7 b' test-pull-permission.t'
320 test-pull-pull-corruption.t
370 test-pull-pull-corruption.t
321 test-pull-r.t
371 test-pull-r.t
322 test-pull-update.t
372 test-pull-update.t
373 test-pull.t
323 test-purge.t
374 test-purge.t
324 test-push-checkheads-partial-C1.t
375 test-push-checkheads-partial-C1.t
325 test-push-checkheads-partial-C2.t
376 test-push-checkheads-partial-C2.t
@@ -350,7 +401,9 b' test-push-checkheads-unpushed-D6.t'
350 test-push-checkheads-unpushed-D7.t
401 test-push-checkheads-unpushed-D7.t
351 test-push-http.t
402 test-push-http.t
352 test-push-warn.t
403 test-push-warn.t
404 test-push.t
353 test-pushvars.t
405 test-pushvars.t
406 test-qrecord.t
354 test-rebase-abort.t
407 test-rebase-abort.t
355 test-rebase-base-flag.t
408 test-rebase-base-flag.t
356 test-rebase-bookmarks.t
409 test-rebase-bookmarks.t
@@ -378,9 +431,11 b' test-rebase-rename.t'
378 test-rebase-scenario-global.t
431 test-rebase-scenario-global.t
379 test-rebase-templates.t
432 test-rebase-templates.t
380 test-rebase-transaction.t
433 test-rebase-transaction.t
434 test-rebuildstate.t
381 test-record.t
435 test-record.t
382 test-relink.t
436 test-relink.t
383 test-remove.t
437 test-remove.t
438 test-removeemptydirs.t
384 test-rename-after-merge.t
439 test-rename-after-merge.t
385 test-rename-dir-merge.t
440 test-rename-dir-merge.t
386 test-rename-merge1.t
441 test-rename-merge1.t
@@ -389,11 +444,14 b' test-repair-strip.t'
389 test-repo-compengines.t
444 test-repo-compengines.t
390 test-resolve.t
445 test-resolve.t
391 test-revert-flags.t
446 test-revert-flags.t
447 test-revert-interactive.t
392 test-revert-unknown.t
448 test-revert-unknown.t
393 test-revlog-ancestry.py
449 test-revlog-ancestry.py
394 test-revlog-group-emptyiter.t
450 test-revlog-group-emptyiter.t
395 test-revlog-mmapindex.t
451 test-revlog-mmapindex.t
396 test-revlog-packentry.t
452 test-revlog-packentry.t
453 test-revlog-raw.py
454 test-revlog-v2.t
397 test-revset-dirstate-parents.t
455 test-revset-dirstate-parents.t
398 test-revset-legacy-lookup.t
456 test-revset-legacy-lookup.t
399 test-revset-outgoing.t
457 test-revset-outgoing.t
@@ -409,34 +467,56 b' test-show-stack.t'
409 test-show-work.t
467 test-show-work.t
410 test-show.t
468 test-show.t
411 test-simple-update.t
469 test-simple-update.t
470 test-simplekeyvaluefile.py
471 test-simplemerge.py
412 test-single-head.t
472 test-single-head.t
413 test-sparse-clear.t
473 test-sparse-clear.t
474 test-sparse-clone.t
414 test-sparse-import.t
475 test-sparse-import.t
415 test-sparse-merges.t
476 test-sparse-merges.t
416 test-sparse-profiles.t
477 test-sparse-profiles.t
417 test-sparse-requirement.t
478 test-sparse-requirement.t
418 test-sparse-verbose-json.t
479 test-sparse-verbose-json.t
480 test-sparse.t
481 test-split.t
482 test-ssh-bundle1.t
419 test-ssh-clone-r.t
483 test-ssh-clone-r.t
484 test-ssh-proto-unbundle.t
420 test-ssh-proto.t
485 test-ssh-proto.t
486 test-ssh.t
421 test-sshserver.py
487 test-sshserver.py
422 test-stack.t
488 test-stack.t
489 test-status-inprocess.py
423 test-status-rev.t
490 test-status-rev.t
424 test-status-terse.t
491 test-status-terse.t
492 test-strict.t
425 test-strip-cross.t
493 test-strip-cross.t
426 test-strip.t
494 test-strip.t
427 test-subrepo-deep-nested-change.t
495 test-subrepo-deep-nested-change.t
428 test-subrepo-missing.t
496 test-subrepo-missing.t
497 test-subrepo-paths.t
429 test-subrepo-recursion.t
498 test-subrepo-recursion.t
430 test-subrepo-relative-path.t
499 test-subrepo-relative-path.t
431 test-subrepo.t
500 test-subrepo.t
501 test-symlink-os-yes-fs-no.py
502 test-symlink-placeholder.t
432 test-symlinks.t
503 test-symlinks.t
433 test-tag.t
504 test-tag.t
434 test-tags.t
505 test-tags.t
435 test-template-engine.t
506 test-template-basic.t
507 test-template-functions.t
508 test-template-keywords.t
509 test-template-map.t
510 test-transplant.t
436 test-treemanifest.t
511 test-treemanifest.t
512 test-ui-color.py
513 test-ui-config.py
514 test-ui-verbosity.py
437 test-unamend.t
515 test-unamend.t
516 test-unbundlehash.t
438 test-uncommit.t
517 test-uncommit.t
439 test-unified-test.t
518 test-unified-test.t
519 test-unionrepo.t
440 test-unrelated-pull.t
520 test-unrelated-pull.t
441 test-up-local-change.t
521 test-up-local-change.t
442 test-update-branches.t
522 test-update-branches.t
@@ -447,11 +527,16 b' test-update-reverse.t'
447 test-upgrade-repo.t
527 test-upgrade-repo.t
448 test-url-download.t
528 test-url-download.t
449 test-url-rev.t
529 test-url-rev.t
530 test-url.py
450 test-username-newline.t
531 test-username-newline.t
451 test-verify.t
532 test-verify.t
533 test-walk.t
534 test-walkrepo.py
452 test-websub.t
535 test-websub.t
453 test-win32text.t
536 test-win32text.t
454 test-wireproto-clientreactor.py
537 test-wireproto-clientreactor.py
455 test-wireproto-framing.py
538 test-wireproto-framing.py
456 test-wireproto-serverreactor.py
539 test-wireproto-serverreactor.py
540 test-wireproto.py
541 test-wsgirequest.py
457 test-xdg.t
542 test-xdg.t
@@ -54,13 +54,16 b' from mercurial.node import ('
54 )
54 )
55 from mercurial import (
55 from mercurial import (
56 context,
56 context,
57 diffutil,
57 error,
58 error,
58 hg,
59 hg,
59 patch,
60 patch,
60 registrar,
61 registrar,
61 scmutil,
62 scmutil,
62 )
63 )
63 from mercurial.utils import dateutil
64 from mercurial.utils import (
65 dateutil,
66 )
64
67
65 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
68 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
66 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
69 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -172,13 +175,10 b' def analyze(ui, repo, *revs, **opts):'
172 revs = scmutil.revrange(repo, revs)
175 revs = scmutil.revrange(repo, revs)
173 revs.sort()
176 revs.sort()
174
177
175 progress = ui.progress
178 progress = ui.makeprogress(_('analyzing'), unit=_('changesets'),
176 _analyzing = _('analyzing')
179 total=len(revs))
177 _changesets = _('changesets')
178 _total = len(revs)
179
180 for i, rev in enumerate(revs):
180 for i, rev in enumerate(revs):
181 progress(_analyzing, i, unit=_changesets, total=_total)
181 progress.update(i)
182 ctx = repo[rev]
182 ctx = repo[rev]
183 pl = ctx.parents()
183 pl = ctx.parents()
184 pctx = pl[0]
184 pctx = pl[0]
@@ -196,7 +196,9 b' def analyze(ui, repo, *revs, **opts):'
196 if lastctx.rev() != nullrev:
196 if lastctx.rev() != nullrev:
197 timedelta = ctx.date()[0] - lastctx.date()[0]
197 timedelta = ctx.date()[0] - lastctx.date()[0]
198 interarrival[roundto(timedelta, 300)] += 1
198 interarrival[roundto(timedelta, 300)] += 1
199 diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), [])
199 diffopts = diffutil.diffallopts(ui, {'git': True})
200 diff = sum((d.splitlines()
201 for d in ctx.diff(pctx, opts=diffopts)), [])
200 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
202 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
201 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
203 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
202 if isbin:
204 if isbin:
@@ -222,6 +224,7 b' def analyze(ui, repo, *revs, **opts):'
222 filesadded[fileadds] += 1
224 filesadded[fileadds] += 1
223 dirsadded[diradds] += 1
225 dirsadded[diradds] += 1
224 filesremoved[fileremoves] += 1
226 filesremoved[fileremoves] += 1
227 progress.complete()
225
228
226 invchildren = zerodict()
229 invchildren = zerodict()
227
230
@@ -338,7 +341,6 b' def synthesize(ui, repo, descpath, **opt'
338
341
339 nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
342 nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
340
343
341 progress = ui.progress
342 _synthesizing = _('synthesizing')
344 _synthesizing = _('synthesizing')
343 _files = _('initial files')
345 _files = _('initial files')
344 _changesets = _('changesets')
346 _changesets = _('changesets')
@@ -362,8 +364,9 b' def synthesize(ui, repo, descpath, **opt'
362 path = os.path.dirname(path)
364 path = os.path.dirname(path)
363 return True
365 return True
364
366
367 progress = ui.makeprogress(_synthesizing, unit=_files, total=initcount)
365 for i in xrange(0, initcount):
368 for i in xrange(0, initcount):
366 ui.progress(_synthesizing, i, unit=_files, total=initcount)
369 progress.update(i)
367
370
368 path = pickpath()
371 path = pickpath()
369 while not validpath(path):
372 while not validpath(path):
@@ -378,7 +381,7 b' def synthesize(ui, repo, descpath, **opt'
378 def filectxfn(repo, memctx, path):
381 def filectxfn(repo, memctx, path):
379 return context.memfilectx(repo, memctx, path, files[path])
382 return context.memfilectx(repo, memctx, path, files[path])
380
383
381 ui.progress(_synthesizing, None)
384 progress.complete()
382 message = 'synthesized wide repo with %d files' % (len(files),)
385 message = 'synthesized wide repo with %d files' % (len(files),)
383 mc = context.memctx(repo, [pctx.node(), nullid], message,
386 mc = context.memctx(repo, [pctx.node(), nullid], message,
384 files, filectxfn, ui.username(),
387 files, filectxfn, ui.username(),
@@ -394,8 +397,9 b' def synthesize(ui, repo, descpath, **opt'
394 # Synthesize incremental revisions to the repository, adding repo depth.
397 # Synthesize incremental revisions to the repository, adding repo depth.
395 count = int(opts['count'])
398 count = int(opts['count'])
396 heads = set(map(repo.changelog.rev, repo.heads()))
399 heads = set(map(repo.changelog.rev, repo.heads()))
400 progress = ui.makeprogress(_synthesizing, unit=_changesets, total=count)
397 for i in xrange(count):
401 for i in xrange(count):
398 progress(_synthesizing, i, unit=_changesets, total=count)
402 progress.update(i)
399
403
400 node = repo.changelog.node
404 node = repo.changelog.node
401 revs = len(repo)
405 revs = len(repo)
@@ -485,6 +489,7 b' def synthesize(ui, repo, descpath, **opt'
485 heads.add(repo.changelog.rev(newnode))
489 heads.add(repo.changelog.rev(newnode))
486 heads.discard(r1)
490 heads.discard(r1)
487 heads.discard(r2)
491 heads.discard(r2)
492 progress.complete()
488
493
489 lock.release()
494 lock.release()
490 wlock.release()
495 wlock.release()
@@ -19,6 +19,7 b''
19 <File Name="color.txt" />
19 <File Name="color.txt" />
20 <File Name="config.txt" KeyPath="yes" />
20 <File Name="config.txt" KeyPath="yes" />
21 <File Name="dates.txt" />
21 <File Name="dates.txt" />
22 <File Name="deprecated.txt" />
22 <File Name="diffs.txt" />
23 <File Name="diffs.txt" />
23 <File Name="environment.txt" />
24 <File Name="environment.txt" />
24 <File Name="extensions.txt" />
25 <File Name="extensions.txt" />
@@ -21,8 +21,9 b' if sys.version_info[0] >= 3:'
21 else:
21 else:
22 from . import demandimportpy2 as demandimport
22 from . import demandimportpy2 as demandimport
23
23
24 # Extensions can add to this list if necessary.
24 # Full module names which can't be lazy imported.
25 ignore = [
25 # Extensions can add to this set.
26 IGNORES = {
26 '__future__',
27 '__future__',
27 '_hashlib',
28 '_hashlib',
28 # ImportError during pkg_resources/__init__.py:fixup_namespace_package
29 # ImportError during pkg_resources/__init__.py:fixup_namespace_package
@@ -55,17 +56,15 b' ignore = ['
55 '__builtin__',
56 '__builtin__',
56 'builtins',
57 'builtins',
57 'urwid.command_map', # for pudb
58 'urwid.command_map', # for pudb
58 ]
59 }
59
60
60 _pypy = '__pypy__' in sys.builtin_module_names
61 _pypy = '__pypy__' in sys.builtin_module_names
61
62
62 if _pypy:
63 if _pypy:
63 ignore.extend([
64 # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
64 # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
65 IGNORES.add('_ctypes.pointer')
65 '_ctypes.pointer',
66 ])
67
66
68 demandimport.init(ignore)
67 demandimport.init(IGNORES)
69
68
70 # Re-export.
69 # Re-export.
71 isenabled = demandimport.isenabled
70 isenabled = demandimport.isenabled
@@ -162,7 +162,7 b' class _demandmod(object):'
162 _pypy = '__pypy__' in sys.builtin_module_names
162 _pypy = '__pypy__' in sys.builtin_module_names
163
163
164 def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
164 def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
165 if locals is None or name in ignore or fromlist == ('*',):
165 if locals is None or name in ignores or fromlist == ('*',):
166 # these cases we can't really delay
166 # these cases we can't really delay
167 return _hgextimport(_origimport, name, globals, locals, fromlist, level)
167 return _hgextimport(_origimport, name, globals, locals, fromlist, level)
168 elif not fromlist:
168 elif not fromlist:
@@ -209,7 +209,7 b' def _demandimport(name, globals=None, lo'
209 # while processing the import statement.
209 # while processing the import statement.
210 return
210 return
211 mn = '%s.%s' % (mod.__name__, attr)
211 mn = '%s.%s' % (mod.__name__, attr)
212 if mn in ignore:
212 if mn in ignores:
213 importfunc = _origimport
213 importfunc = _origimport
214 else:
214 else:
215 importfunc = _demandmod
215 importfunc = _demandmod
@@ -273,11 +273,11 b' def _demandimport(name, globals=None, lo'
273
273
274 return mod
274 return mod
275
275
276 ignore = []
276 ignores = set()
277
277
278 def init(ignorelist):
278 def init(ignoreset):
279 global ignore
279 global ignores
280 ignore = ignorelist
280 ignores = ignoreset
281
281
282 def isenabled():
282 def isenabled():
283 return builtins.__import__ == _demandimport
283 return builtins.__import__ == _demandimport
@@ -40,7 +40,7 b' class _lazyloaderex(importlib.util.LazyL'
40 """
40 """
41 def exec_module(self, module):
41 def exec_module(self, module):
42 """Make the module load lazily."""
42 """Make the module load lazily."""
43 if _deactivated or module.__name__ in ignore:
43 if _deactivated or module.__name__ in ignores:
44 self.loader.exec_module(module)
44 self.loader.exec_module(module)
45 else:
45 else:
46 super().exec_module(module)
46 super().exec_module(module)
@@ -62,11 +62,11 b' def _makefinder(path):'
62 (_bytecode_loader, importlib.machinery.BYTECODE_SUFFIXES),
62 (_bytecode_loader, importlib.machinery.BYTECODE_SUFFIXES),
63 )
63 )
64
64
65 ignore = []
65 ignores = set()
66
66
67 def init(ignorelist):
67 def init(ignoreset):
68 global ignore
68 global ignores
69 ignore = ignorelist
69 ignores = ignoreset
70
70
71 def isenabled():
71 def isenabled():
72 return _makefinder in sys.path_hooks and not _deactivated
72 return _makefinder in sys.path_hooks and not _deactivated
@@ -57,6 +57,28 b' access control. Keys in these sections a'
57 a glob syntax by default). The corresponding values follow the same
57 a glob syntax by default). The corresponding values follow the same
58 syntax as the other sections above.
58 syntax as the other sections above.
59
59
60 Bookmark-based Access Control
61 -----------------------------
62 Use the ``acl.deny.bookmarks`` and ``acl.allow.bookmarks`` sections to
63 have bookmark-based access control. Keys in these sections can be
64 either:
65
66 - a bookmark name, or
67 - an asterisk, to match any bookmark;
68
69 The corresponding values can be either:
70
71 - a comma-separated list containing users and groups, or
72 - an asterisk, to match anyone;
73
74 You can add the "!" prefix to a user or group name to invert the sense
75 of the match.
76
77 Note: for interactions between clients and servers using Mercurial 3.6+
78 a rejection will generally reject the entire push, for interactions
79 involving older clients, the commit transactions will already be accepted,
80 and only the bookmark movement will be rejected.
81
60 Groups
82 Groups
61 ------
83 ------
62
84
@@ -326,9 +348,10 b' def hook(ui, repo, hooktype, node=None, '
326
348
327 ensureenabled(ui)
349 ensureenabled(ui)
328
350
329 if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
351 if hooktype not in ['pretxnchangegroup', 'pretxncommit', 'prepushkey']:
330 raise error.Abort(_('config error - hook type "%s" cannot stop '
352 raise error.Abort(
331 'incoming changesets nor commits') % hooktype)
353 _('config error - hook type "%s" cannot stop '
354 'incoming changesets, commits, nor bookmarks') % hooktype)
332 if (hooktype == 'pretxnchangegroup' and
355 if (hooktype == 'pretxnchangegroup' and
333 source not in ui.configlist('acl', 'sources')):
356 source not in ui.configlist('acl', 'sources')):
334 ui.debug('acl: changes have source "%s" - skipping\n' % source)
357 ui.debug('acl: changes have source "%s" - skipping\n' % source)
@@ -345,6 +368,30 b' def hook(ui, repo, hooktype, node=None, '
345
368
346 ui.debug('acl: checking access for user "%s"\n' % user)
369 ui.debug('acl: checking access for user "%s"\n' % user)
347
370
371 if hooktype == 'prepushkey':
372 _pkhook(ui, repo, hooktype, node, source, user, **kwargs)
373 else:
374 _txnhook(ui, repo, hooktype, node, source, user, **kwargs)
375
376 def _pkhook(ui, repo, hooktype, node, source, user, **kwargs):
377 if kwargs['namespace'] == 'bookmarks':
378 bookmark = kwargs['key']
379 ctx = kwargs['new']
380 allowbookmarks = buildmatch(ui, None, user, 'acl.allow.bookmarks')
381 denybookmarks = buildmatch(ui, None, user, 'acl.deny.bookmarks')
382
383 if denybookmarks and denybookmarks(bookmark):
384 raise error.Abort(_('acl: user "%s" denied on bookmark "%s"'
385 ' (changeset "%s")')
386 % (user, bookmark, ctx))
387 if allowbookmarks and not allowbookmarks(bookmark):
388 raise error.Abort(_('acl: user "%s" not allowed on bookmark "%s"'
389 ' (changeset "%s")')
390 % (user, bookmark, ctx))
391 ui.debug('acl: bookmark access granted: "%s" on bookmark "%s"\n'
392 % (ctx, bookmark))
393
394 def _txnhook(ui, repo, hooktype, node, source, user, **kwargs):
348 # deprecated config: acl.config
395 # deprecated config: acl.config
349 cfg = ui.config('acl', 'config')
396 cfg = ui.config('acl', 'config')
350 if cfg:
397 if cfg:
@@ -32,7 +32,6 b' from mercurial.node import short'
32
32
33 from mercurial import (
33 from mercurial import (
34 error,
34 error,
35 lock as lockmod,
36 registrar,
35 registrar,
37 revlog,
36 revlog,
38 scmutil,
37 scmutil,
@@ -52,13 +51,8 b" testedwith = 'ships-with-hg-core'"
52 ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
51 ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
53 _('-r REV [-t TEXT] [FILE]'))
52 _('-r REV [-t TEXT] [FILE]'))
54 def censor(ui, repo, path, rev='', tombstone='', **opts):
53 def censor(ui, repo, path, rev='', tombstone='', **opts):
55 wlock = lock = None
54 with repo.wlock(), repo.lock():
56 try:
57 wlock = repo.wlock()
58 lock = repo.lock()
59 return _docensor(ui, repo, path, rev, tombstone, **opts)
55 return _docensor(ui, repo, path, rev, tombstone, **opts)
60 finally:
61 lockmod.release(lock, wlock)
62
56
63 def _docensor(ui, repo, path, rev='', tombstone='', **opts):
57 def _docensor(ui, repo, path, rev='', tombstone='', **opts):
64 if not path:
58 if not path:
@@ -52,7 +52,7 b' def countrate(ui, repo, amap, *pats, **o'
52 def getkey(ctx):
52 def getkey(ctx):
53 t, tz = ctx.date()
53 t, tz = ctx.date()
54 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
54 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
55 return date.strftime(opts['dateformat'])
55 return date.strftime(encoding.strfromlocal(opts['dateformat']))
56 else:
56 else:
57 tmpl = opts.get('oldtemplate') or opts.get('template')
57 tmpl = opts.get('oldtemplate') or opts.get('template')
58 tmpl = logcmdutil.maketemplater(ui, repo, tmpl)
58 tmpl = logcmdutil.maketemplater(ui, repo, tmpl)
@@ -61,7 +61,8 b' def countrate(ui, repo, amap, *pats, **o'
61 tmpl.show(ctx)
61 tmpl.show(ctx)
62 return ui.popbuffer()
62 return ui.popbuffer()
63
63
64 state = {'count': 0}
64 progress = ui.makeprogress(_('analyzing'), unit=_('revisions'),
65 total=len(repo))
65 rate = {}
66 rate = {}
66 df = False
67 df = False
67 if opts.get('date'):
68 if opts.get('date'):
@@ -87,14 +88,12 b' def countrate(ui, repo, amap, *pats, **o'
87 lines = changedlines(ui, repo, ctx1, ctx, fns)
88 lines = changedlines(ui, repo, ctx1, ctx, fns)
88 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
89 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
89
90
90 state['count'] += 1
91 progress.increment()
91 ui.progress(_('analyzing'), state['count'], total=len(repo),
92 unit=_('revisions'))
93
92
94 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
93 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
95 continue
94 continue
96
95
97 ui.progress(_('analyzing'), None)
96 progress.complete()
98
97
99 return rate
98 return rate
100
99
@@ -161,7 +160,7 b' def churn(ui, repo, *pats, **opts):'
161 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
160 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
162 aliases = repo.wjoin('.hgchurn')
161 aliases = repo.wjoin('.hgchurn')
163 if aliases:
162 if aliases:
164 for l in open(aliases, "r"):
163 for l in open(aliases, "rb"):
165 try:
164 try:
166 alias, actual = l.rsplit('=' in l and '=' or None, 1)
165 alias, actual = l.rsplit('=' in l and '=' or None, 1)
167 amap[alias.strip()] = actual.strip()
166 amap[alias.strip()] = actual.strip()
@@ -204,6 +204,14 b' def convert(ui, src, dest=None, revmapfi'
204
204
205 :convert.hg.revs: revset specifying the source revisions to convert.
205 :convert.hg.revs: revset specifying the source revisions to convert.
206
206
207 Bazaar Source
208 #############
209
210 The following options can be used with ``--config``:
211
212 :convert.bzr.saverev: whether to store the original Bazaar commit ID in
213 the metadata of the destination commit. The default is True.
214
207 CVS Source
215 CVS Source
208 ##########
216 ##########
209
217
@@ -19,7 +19,7 b' from mercurial import ('
19 from . import common
19 from . import common
20
20
21 # these do not work with demandimport, blacklist
21 # these do not work with demandimport, blacklist
22 demandimport.ignore.extend([
22 demandimport.IGNORES.update([
23 'bzrlib.transactions',
23 'bzrlib.transactions',
24 'bzrlib.urlutils',
24 'bzrlib.urlutils',
25 'ElementPath',
25 'ElementPath',
@@ -65,6 +65,7 b' class bzr_source(common.converter_source'
65 raise common.NoRepo(_('%s does not look like a Bazaar repository')
65 raise common.NoRepo(_('%s does not look like a Bazaar repository')
66 % path)
66 % path)
67 self._parentids = {}
67 self._parentids = {}
68 self._saverev = ui.configbool('convert', 'bzr.saverev')
68
69
69 def _checkrepotype(self, path):
70 def _checkrepotype(self, path):
70 # Lightweight checkouts detection is informational but probably
71 # Lightweight checkouts detection is informational but probably
@@ -175,7 +176,8 b' class bzr_source(common.converter_source'
175 author=self.recode(rev.committer),
176 author=self.recode(rev.committer),
176 desc=self.recode(rev.message),
177 desc=self.recode(rev.message),
177 branch=branch,
178 branch=branch,
178 rev=version)
179 rev=version,
180 saverev=self._saverev)
179
181
180 def gettags(self):
182 def gettags(self):
181 bytetags = {}
183 bytetags = {}
@@ -214,7 +214,7 b' class converter_source(object):'
214 if not encoding:
214 if not encoding:
215 encoding = self.encoding or 'utf-8'
215 encoding = self.encoding or 'utf-8'
216
216
217 if isinstance(s, unicode):
217 if isinstance(s, pycompat.unicode):
218 return s.encode("utf-8")
218 return s.encode("utf-8")
219 try:
219 try:
220 return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
220 return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
@@ -55,7 +55,7 b' svn_source = subversion.svn_source'
55 orig_encoding = 'ascii'
55 orig_encoding = 'ascii'
56
56
57 def recode(s):
57 def recode(s):
58 if isinstance(s, unicode):
58 if isinstance(s, pycompat.unicode):
59 return s.encode(pycompat.sysstr(orig_encoding), 'replace')
59 return s.encode(pycompat.sysstr(orig_encoding), 'replace')
60 else:
60 else:
61 return s.decode('utf-8').encode(
61 return s.decode('utf-8').encode(
@@ -123,7 +123,7 b' def convertsource(ui, path, type, revs):'
123 exceptions.append(inst)
123 exceptions.append(inst)
124 if not ui.quiet:
124 if not ui.quiet:
125 for inst in exceptions:
125 for inst in exceptions:
126 ui.write("%s\n" % inst)
126 ui.write("%s\n" % pycompat.bytestr(inst))
127 raise error.Abort(_('%s: missing or unsupported repository') % path)
127 raise error.Abort(_('%s: missing or unsupported repository') % path)
128
128
129 def convertsink(ui, path, type):
129 def convertsink(ui, path, type):
@@ -143,13 +143,11 b' class progresssource(object):'
143 def __init__(self, ui, source, filecount):
143 def __init__(self, ui, source, filecount):
144 self.ui = ui
144 self.ui = ui
145 self.source = source
145 self.source = source
146 self.filecount = filecount
146 self.progress = ui.makeprogress(_('getting files'), unit=_('files'),
147 self.retrieved = 0
147 total=filecount)
148
148
149 def getfile(self, file, rev):
149 def getfile(self, file, rev):
150 self.retrieved += 1
150 self.progress.increment(item=file)
151 self.ui.progress(_('getting files'), self.retrieved,
152 item=file, total=self.filecount, unit=_('files'))
153 return self.source.getfile(file, rev)
151 return self.source.getfile(file, rev)
154
152
155 def targetfilebelongstosource(self, targetfilename):
153 def targetfilebelongstosource(self, targetfilename):
@@ -159,7 +157,7 b' class progresssource(object):'
159 return self.source.lookuprev(rev)
157 return self.source.lookuprev(rev)
160
158
161 def close(self):
159 def close(self):
162 self.ui.progress(_('getting files'), None)
160 self.progress.complete()
163
161
164 class converter(object):
162 class converter(object):
165 def __init__(self, ui, source, dest, revmapfile, opts):
163 def __init__(self, ui, source, dest, revmapfile, opts):
@@ -234,10 +232,12 b' class converter(object):'
234 def walktree(self, heads):
232 def walktree(self, heads):
235 '''Return a mapping that identifies the uncommitted parents of every
233 '''Return a mapping that identifies the uncommitted parents of every
236 uncommitted changeset.'''
234 uncommitted changeset.'''
237 visit = heads
235 visit = list(heads)
238 known = set()
236 known = set()
239 parents = {}
237 parents = {}
240 numcommits = self.source.numcommits()
238 numcommits = self.source.numcommits()
239 progress = self.ui.makeprogress(_('scanning'), unit=_('revisions'),
240 total=numcommits)
241 while visit:
241 while visit:
242 n = visit.pop(0)
242 n = visit.pop(0)
243 if n in known:
243 if n in known:
@@ -247,14 +247,13 b' class converter(object):'
247 if m == SKIPREV or self.dest.hascommitfrommap(m):
247 if m == SKIPREV or self.dest.hascommitfrommap(m):
248 continue
248 continue
249 known.add(n)
249 known.add(n)
250 self.ui.progress(_('scanning'), len(known), unit=_('revisions'),
250 progress.update(len(known))
251 total=numcommits)
252 commit = self.cachecommit(n)
251 commit = self.cachecommit(n)
253 parents[n] = []
252 parents[n] = []
254 for p in commit.parents:
253 for p in commit.parents:
255 parents[n].append(p)
254 parents[n].append(p)
256 visit.append(p)
255 visit.append(p)
257 self.ui.progress(_('scanning'), None)
256 progress.complete()
258
257
259 return parents
258 return parents
260
259
@@ -510,6 +509,8 b' class converter(object):'
510 c = None
509 c = None
511
510
512 self.ui.status(_("converting...\n"))
511 self.ui.status(_("converting...\n"))
512 progress = self.ui.makeprogress(_('converting'),
513 unit=_('revisions'), total=len(t))
513 for i, c in enumerate(t):
514 for i, c in enumerate(t):
514 num -= 1
515 num -= 1
515 desc = self.commitcache[c].desc
516 desc = self.commitcache[c].desc
@@ -520,10 +521,9 b' class converter(object):'
520 # uses is 'utf-8'
521 # uses is 'utf-8'
521 self.ui.status("%d %s\n" % (num, recode(desc)))
522 self.ui.status("%d %s\n" % (num, recode(desc)))
522 self.ui.note(_("source: %s\n") % recode(c))
523 self.ui.note(_("source: %s\n") % recode(c))
523 self.ui.progress(_('converting'), i, unit=_('revisions'),
524 progress.update(i)
524 total=len(t))
525 self.copy(c)
525 self.copy(c)
526 self.ui.progress(_('converting'), None)
526 progress.complete()
527
527
528 if not self.ui.configbool('convert', 'skiptags'):
528 if not self.ui.configbool('convert', 'skiptags'):
529 tags = self.source.gettags()
529 tags = self.source.gettags()
@@ -6,6 +6,7 b''
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import functools
9 import os
10 import os
10 import re
11 import re
11
12
@@ -50,8 +51,8 b' class logentry(object):'
50 self.__dict__.update(entries)
51 self.__dict__.update(entries)
51
52
52 def __repr__(self):
53 def __repr__(self):
53 items = ("%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
54 items = (r"%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
54 return "%s(%s)"%(type(self).__name__, ", ".join(items))
55 return r"%s(%s)"%(type(self).__name__, r", ".join(items))
55
56
56 class logerror(Exception):
57 class logerror(Exception):
57 pass
58 pass
@@ -110,25 +111,25 b' def createlog(ui, directory=None, root="'
110 log = [] # list of logentry objects containing the CVS state
111 log = [] # list of logentry objects containing the CVS state
111
112
112 # patterns to match in CVS (r)log output, by state of use
113 # patterns to match in CVS (r)log output, by state of use
113 re_00 = re.compile('RCS file: (.+)$')
114 re_00 = re.compile(b'RCS file: (.+)$')
114 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
115 re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
115 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
116 re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
116 re_03 = re.compile("(Cannot access.+CVSROOT)|"
117 re_03 = re.compile(b"(Cannot access.+CVSROOT)|"
117 "(can't create temporary directory.+)$")
118 b"(can't create temporary directory.+)$")
118 re_10 = re.compile('Working file: (.+)$')
119 re_10 = re.compile(b'Working file: (.+)$')
119 re_20 = re.compile('symbolic names:')
120 re_20 = re.compile(b'symbolic names:')
120 re_30 = re.compile('\t(.+): ([\\d.]+)$')
121 re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
121 re_31 = re.compile('----------------------------$')
122 re_31 = re.compile(b'----------------------------$')
122 re_32 = re.compile('======================================='
123 re_32 = re.compile(b'======================================='
123 '======================================$')
124 b'======================================$')
124 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
125 re_50 = re.compile(b'revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
125 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
126 re_60 = re.compile(br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
126 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
127 br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
127 r'(\s+commitid:\s+([^;]+);)?'
128 br'(\s+commitid:\s+([^;]+);)?'
128 r'(.*mergepoint:\s+([^;]+);)?')
129 br'(.*mergepoint:\s+([^;]+);)?')
129 re_70 = re.compile('branches: (.+);$')
130 re_70 = re.compile(b'branches: (.+);$')
130
131
131 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
132 file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')
132
133
133 prefix = '' # leading path to strip of what we get from CVS
134 prefix = '' # leading path to strip of what we get from CVS
134
135
@@ -509,7 +510,8 b' def createlog(ui, directory=None, root="'
509 comment = entry.comment
510 comment = entry.comment
510 for e in encodings:
511 for e in encodings:
511 try:
512 try:
512 entry.comment = comment.decode(e).encode('utf-8')
513 entry.comment = comment.decode(
514 pycompat.sysstr(e)).encode('utf-8')
513 if ui.debugflag:
515 if ui.debugflag:
514 ui.debug("transcoding by %s: %s of %s\n" %
516 ui.debug("transcoding by %s: %s of %s\n" %
515 (e, revstr(entry.revision), entry.file))
517 (e, revstr(entry.revision), entry.file))
@@ -565,11 +567,15 b' def createchangeset(ui, log, fuzz=60, me'
565 mindate = {}
567 mindate = {}
566 for e in log:
568 for e in log:
567 if e.commitid:
569 if e.commitid:
568 mindate[e.commitid] = min(e.date, mindate.get(e.commitid))
570 if e.commitid not in mindate:
571 mindate[e.commitid] = e.date
572 else:
573 mindate[e.commitid] = min(e.date, mindate[e.commitid])
569
574
570 # Merge changesets
575 # Merge changesets
571 log.sort(key=lambda x: (mindate.get(x.commitid), x.commitid, x.comment,
576 log.sort(key=lambda x: (mindate.get(x.commitid, (-1, 0)),
572 x.author, x.branch, x.date, x.branchpoints))
577 x.commitid or '', x.comment,
578 x.author, x.branch or '', x.date, x.branchpoints))
573
579
574 changesets = []
580 changesets = []
575 files = set()
581 files = set()
@@ -653,7 +659,7 b' def createchangeset(ui, log, fuzz=60, me'
653 return 0
659 return 0
654
660
655 for c in changesets:
661 for c in changesets:
656 c.entries.sort(entitycompare)
662 c.entries.sort(key=functools.cmp_to_key(entitycompare))
657
663
658 # Sort changesets by date
664 # Sort changesets by date
659
665
@@ -706,7 +712,7 b' def createchangeset(ui, log, fuzz=60, me'
706 d = c(len(l.branchpoints), len(r.branchpoints))
712 d = c(len(l.branchpoints), len(r.branchpoints))
707 return d
713 return d
708
714
709 changesets.sort(cscmp)
715 changesets.sort(key=functools.cmp_to_key(cscmp))
710
716
711 # Collect tags
717 # Collect tags
712
718
@@ -729,12 +735,12 b' def createchangeset(ui, log, fuzz=60, me'
729 # {{mergefrombranch BRANCHNAME}} by setting two parents.
735 # {{mergefrombranch BRANCHNAME}} by setting two parents.
730
736
731 if mergeto is None:
737 if mergeto is None:
732 mergeto = r'{{mergetobranch ([-\w]+)}}'
738 mergeto = br'{{mergetobranch ([-\w]+)}}'
733 if mergeto:
739 if mergeto:
734 mergeto = re.compile(mergeto)
740 mergeto = re.compile(mergeto)
735
741
736 if mergefrom is None:
742 if mergefrom is None:
737 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
743 mergefrom = br'{{mergefrombranch ([-\w]+)}}'
738 if mergefrom:
744 if mergefrom:
739 mergefrom = re.compile(mergefrom)
745 mergefrom = re.compile(mergefrom)
740
746
@@ -797,7 +803,7 b' def createchangeset(ui, log, fuzz=60, me'
797 except KeyError:
803 except KeyError:
798 ui.warn(_("warning: CVS commit message references "
804 ui.warn(_("warning: CVS commit message references "
799 "non-existent branch %r:\n%s\n")
805 "non-existent branch %r:\n%s\n")
800 % (m, c.comment))
806 % (pycompat.bytestr(m), c.comment))
801 if m in branches and c.branch != m and not candidate.synthetic:
807 if m in branches and c.branch != m and not candidate.synthetic:
802 c.parents.append(candidate)
808 c.parents.append(candidate)
803
809
@@ -940,7 +946,8 b' def debugcvsps(ui, *args, **opts):'
940 if fn.startswith(opts["prefix"]):
946 if fn.startswith(opts["prefix"]):
941 fn = fn[len(opts["prefix"]):]
947 fn = fn[len(opts["prefix"]):]
942 ui.write('\t%s:%s->%s%s \n' % (
948 ui.write('\t%s:%s->%s%s \n' % (
943 fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
949 fn,
950 '.'.join([b"%d" % x for x in f.parent]) or 'INITIAL',
944 '.'.join([(b"%d" % x) for x in f.revision]),
951 '.'.join([(b"%d" % x) for x in f.revision]),
945 ['', '(DEAD)'][f.dead]))
952 ['', '(DEAD)'][f.dead]))
946 ui.write('\n')
953 ui.write('\n')
@@ -10,10 +10,11 b' import errno'
10 import os
10 import os
11 import re
11 import re
12 import shutil
12 import shutil
13 import tempfile
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 error,
16 error,
17 pycompat,
17 util,
18 util,
18 )
19 )
19 from mercurial.utils import dateutil
20 from mercurial.utils import dateutil
@@ -76,7 +77,7 b' class darcs_source(common.converter_sour'
76 self.ui.warn(_('failed to detect repository format!'))
77 self.ui.warn(_('failed to detect repository format!'))
77
78
78 def before(self):
79 def before(self):
79 self.tmppath = tempfile.mkdtemp(
80 self.tmppath = pycompat.mkdtemp(
80 prefix='convert-' + os.path.basename(self.path) + '-')
81 prefix='convert-' + os.path.basename(self.path) + '-')
81 output, status = self.run('init', repodir=self.tmppath)
82 output, status = self.run('init', repodir=self.tmppath)
82 self.checkexit(status)
83 self.checkexit(status)
@@ -103,7 +104,7 b' class darcs_source(common.converter_sour'
103 shutil.rmtree(self.tmppath, ignore_errors=True)
104 shutil.rmtree(self.tmppath, ignore_errors=True)
104
105
105 def recode(self, s, encoding=None):
106 def recode(self, s, encoding=None):
106 if isinstance(s, unicode):
107 if isinstance(s, pycompat.unicode):
107 # XMLParser returns unicode objects for anything it can't
108 # XMLParser returns unicode objects for anything it can't
108 # encode into ASCII. We convert them back to str to get
109 # encode into ASCII. We convert them back to str to get
109 # recode's normal conversion behavior.
110 # recode's normal conversion behavior.
@@ -125,8 +126,7 b' class darcs_source(common.converter_sour'
125 return etree.getroot()
126 return etree.getroot()
126
127
127 def format(self):
128 def format(self):
128 output, status = self.run('show', 'repo', no_files=True,
129 output, status = self.run('show', 'repo', repodir=self.path)
129 repodir=self.path)
130 self.checkexit(status)
130 self.checkexit(status)
131 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
131 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
132 if not m:
132 if not m:
@@ -5,7 +5,6 b' from __future__ import absolute_import'
5
5
6 import os
6 import os
7 import re
7 import re
8 import tempfile
9 import xml.dom.minidom
8 import xml.dom.minidom
10
9
11 from mercurial.i18n import _
10 from mercurial.i18n import _
@@ -751,9 +750,10 b' class svn_source(converter_source):'
751 self.module = new_module
750 self.module = new_module
752 self.reparent(self.module)
751 self.reparent(self.module)
753
752
753 progress = self.ui.makeprogress(_('scanning paths'), unit=_('paths'),
754 total=len(paths))
754 for i, (path, ent) in enumerate(paths):
755 for i, (path, ent) in enumerate(paths):
755 self.ui.progress(_('scanning paths'), i, item=path,
756 progress.update(i, item=path)
756 total=len(paths), unit=_('paths'))
757 entrypath = self.getrelpath(path)
757 entrypath = self.getrelpath(path)
758
758
759 kind = self._checkpath(entrypath, revnum)
759 kind = self._checkpath(entrypath, revnum)
@@ -839,7 +839,7 b' class svn_source(converter_source):'
839 copytopath = self.getrelpath(copytopath)
839 copytopath = self.getrelpath(copytopath)
840 copies[self.recode(copytopath)] = self.recode(childpath)
840 copies[self.recode(copytopath)] = self.recode(childpath)
841
841
842 self.ui.progress(_('scanning paths'), None)
842 progress.complete()
843 changed.update(removed)
843 changed.update(removed)
844 return (list(changed), removed, copies)
844 return (list(changed), removed, copies)
845
845
@@ -1081,7 +1081,7 b' class svn_source(converter_source):'
1081 ' hg executable is in PATH'))
1081 ' hg executable is in PATH'))
1082 return logstream(stdout)
1082 return logstream(stdout)
1083
1083
1084 pre_revprop_change = '''#!/bin/sh
1084 pre_revprop_change = b'''#!/bin/sh
1085
1085
1086 REPOS="$1"
1086 REPOS="$1"
1087 REV="$2"
1087 REV="$2"
@@ -1098,8 +1098,8 b' exit 1'
1098 '''
1098 '''
1099
1099
1100 class svn_sink(converter_sink, commandline):
1100 class svn_sink(converter_sink, commandline):
1101 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1101 commit_re = re.compile(br'Committed revision (\d+).', re.M)
1102 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1102 uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M)
1103
1103
1104 def prerun(self):
1104 def prerun(self):
1105 if self.wc:
1105 if self.wc:
@@ -1225,7 +1225,7 b' class svn_sink(converter_sink, commandli'
1225 wdest = self.wjoin(dest)
1225 wdest = self.wjoin(dest)
1226 exists = os.path.lexists(wdest)
1226 exists = os.path.lexists(wdest)
1227 if exists:
1227 if exists:
1228 fd, tempname = tempfile.mkstemp(
1228 fd, tempname = pycompat.mkstemp(
1229 prefix='hg-copy-', dir=os.path.dirname(wdest))
1229 prefix='hg-copy-', dir=os.path.dirname(wdest))
1230 os.close(fd)
1230 os.close(fd)
1231 os.unlink(tempname)
1231 os.unlink(tempname)
@@ -1313,7 +1313,7 b' class svn_sink(converter_sink, commandli'
1313 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1313 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1314 self.setexec = []
1314 self.setexec = []
1315
1315
1316 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1316 fd, messagefile = pycompat.mkstemp(prefix='hg-convert-')
1317 fp = os.fdopen(fd, r'wb')
1317 fp = os.fdopen(fd, r'wb')
1318 fp.write(util.tonativeeol(commit.desc))
1318 fp.write(util.tonativeeol(commit.desc))
1319 fp.close()
1319 fp.close()
@@ -142,7 +142,7 b' def tolf(s, params, ui, **kwargs):'
142 if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
142 if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
143 return s
143 return s
144 if (ui.configbool('eol', 'fix-trailing-newline')
144 if (ui.configbool('eol', 'fix-trailing-newline')
145 and s and s[-1] != '\n'):
145 and s and not s.endswith('\n')):
146 s = s + '\n'
146 s = s + '\n'
147 return util.tolf(s)
147 return util.tolf(s)
148
148
@@ -153,7 +153,7 b' def tocrlf(s, params, ui, **kwargs):'
153 if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
153 if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
154 return s
154 return s
155 if (ui.configbool('eol', 'fix-trailing-newline')
155 if (ui.configbool('eol', 'fix-trailing-newline')
156 and s and s[-1] != '\n'):
156 and s and not s.endswith('\n')):
157 s = s + '\n'
157 s = s + '\n'
158 return util.tocrlf(s)
158 return util.tocrlf(s)
159
159
@@ -71,7 +71,7 b' import os'
71 import re
71 import re
72 import shutil
72 import shutil
73 import stat
73 import stat
74 import tempfile
74
75 from mercurial.i18n import _
75 from mercurial.i18n import _
76 from mercurial.node import (
76 from mercurial.node import (
77 nullid,
77 nullid,
@@ -210,7 +210,7 b' def dodiff(ui, repo, cmdline, pats, opts'
210 if not common:
210 if not common:
211 return 0
211 return 0
212
212
213 tmproot = tempfile.mkdtemp(prefix='extdiff.')
213 tmproot = pycompat.mkdtemp(prefix='extdiff.')
214 try:
214 try:
215 if not opts.get('patch'):
215 if not opts.get('patch'):
216 # Always make a copy of node1a (and node1b, if applicable)
216 # Always make a copy of node1a (and node1b, if applicable)
@@ -70,6 +70,7 b' from mercurial import ('
70 registrar,
70 registrar,
71 scmutil,
71 scmutil,
72 util,
72 util,
73 worker,
73 )
74 )
74
75
75 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
76 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -133,32 +134,56 b' def fix(ui, repo, *pats, **opts):'
133 raise error.Abort(_('cannot specify both "--rev" and "--all"'))
134 raise error.Abort(_('cannot specify both "--rev" and "--all"'))
134 opts['rev'] = ['not public() and not obsolete()']
135 opts['rev'] = ['not public() and not obsolete()']
135 opts['working_dir'] = True
136 opts['working_dir'] = True
136 with repo.wlock(), repo.lock():
137 with repo.wlock(), repo.lock(), repo.transaction('fix'):
137 revstofix = getrevstofix(ui, repo, opts)
138 revstofix = getrevstofix(ui, repo, opts)
138 basectxs = getbasectxs(repo, opts, revstofix)
139 basectxs = getbasectxs(repo, opts, revstofix)
139 workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
140 workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
140 basectxs)
141 basectxs)
142 fixers = getfixers(ui)
143
144 # There are no data dependencies between the workers fixing each file
145 # revision, so we can use all available parallelism.
146 def getfixes(items):
147 for rev, path in items:
148 ctx = repo[rev]
149 olddata = ctx[path].data()
150 newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
151 # Don't waste memory/time passing unchanged content back, but
152 # produce one result per item either way.
153 yield (rev, path, newdata if newdata != olddata else None)
154 results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue)
155
156 # We have to hold on to the data for each successor revision in memory
157 # until all its parents are committed. We ensure this by committing and
158 # freeing memory for the revisions in some topological order. This
159 # leaves a little bit of memory efficiency on the table, but also makes
160 # the tests deterministic. It might also be considered a feature since
161 # it makes the results more easily reproducible.
141 filedata = collections.defaultdict(dict)
162 filedata = collections.defaultdict(dict)
142 replacements = {}
163 replacements = {}
143 fixers = getfixers(ui)
164 commitorder = sorted(revstofix, reverse=True)
144 # Some day this loop can become a worker pool, but for now it's easier
165 with ui.makeprogress(topic=_('fixing'), unit=_('files'),
145 # to fix everything serially in topological order.
166 total=sum(numitems.values())) as progress:
146 for rev, path in sorted(workqueue):
167 for rev, path, newdata in results:
147 ctx = repo[rev]
168 progress.increment(item=path)
148 olddata = ctx[path].data()
169 if newdata is not None:
149 newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
170 filedata[rev][path] = newdata
150 if newdata != olddata:
171 numitems[rev] -= 1
151 filedata[rev][path] = newdata
172 # Apply the fixes for this and any other revisions that are
152 numitems[rev] -= 1
173 # ready and sitting at the front of the queue. Using a loop here
153 if not numitems[rev]:
174 # prevents the queue from being blocked by the first revision to
154 if rev == wdirrev:
175 # be ready out of order.
155 writeworkingdir(repo, ctx, filedata[rev], replacements)
176 while commitorder and not numitems[commitorder[-1]]:
156 else:
177 rev = commitorder.pop()
157 replacerev(ui, repo, ctx, filedata[rev], replacements)
178 ctx = repo[rev]
158 del filedata[rev]
179 if rev == wdirrev:
180 writeworkingdir(repo, ctx, filedata[rev], replacements)
181 else:
182 replacerev(ui, repo, ctx, filedata[rev], replacements)
183 del filedata[rev]
159
184
160 replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
185 replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
161 scmutil.cleanupnodes(repo, replacements, 'fix')
186 scmutil.cleanupnodes(repo, replacements, 'fix', fixphase=True)
162
187
163 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
188 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
164 """"Constructs the list of files to be fixed at specific revisions
189 """"Constructs the list of files to be fixed at specific revisions
@@ -168,11 +193,19 b' def getworkqueue(ui, repo, pats, opts, r'
168 topological order. Each work item represents a file in the working copy or
193 topological order. Each work item represents a file in the working copy or
169 in some revision that should be fixed and written back to the working copy
194 in some revision that should be fixed and written back to the working copy
170 or into a replacement revision.
195 or into a replacement revision.
196
197 Work items for the same revision are grouped together, so that a worker
198 pool starting with the first N items in parallel is likely to finish the
199 first revision's work before other revisions. This can allow us to write
200 the result to disk and reduce memory footprint. At time of writing, the
201 partition strategy in worker.py seems favorable to this. We also sort the
202 items by ascending revision number to match the order in which we commit
203 the fixes later.
171 """
204 """
172 workqueue = []
205 workqueue = []
173 numitems = collections.defaultdict(int)
206 numitems = collections.defaultdict(int)
174 maxfilesize = ui.configbytes('fix', 'maxfilesize')
207 maxfilesize = ui.configbytes('fix', 'maxfilesize')
175 for rev in revstofix:
208 for rev in sorted(revstofix):
176 fixctx = repo[rev]
209 fixctx = repo[rev]
177 match = scmutil.match(fixctx, pats, opts)
210 match = scmutil.match(fixctx, pats, opts)
178 for path in pathstofix(ui, repo, pats, opts, match, basectxs[rev],
211 for path in pathstofix(ui, repo, pats, opts, match, basectxs[rev],
@@ -352,7 +385,9 b' def getbasectxs(repo, opts, revstofix):'
352 """Returns a map of the base contexts for each revision
385 """Returns a map of the base contexts for each revision
353
386
354 The base contexts determine which lines are considered modified when we
387 The base contexts determine which lines are considered modified when we
355 attempt to fix just the modified lines in a file.
388 attempt to fix just the modified lines in a file. It also determines which
389 files we attempt to fix, so it is important to compute this even when
390 --whole is used.
356 """
391 """
357 # The --base flag overrides the usual logic, and we give every revision
392 # The --base flag overrides the usual logic, and we give every revision
358 # exactly the set of baserevs that the user specified.
393 # exactly the set of baserevs that the user specified.
@@ -484,25 +519,23 b' def replacerev(ui, repo, ctx, filedata, '
484 isexec=fctx.isexec(),
519 isexec=fctx.isexec(),
485 copied=copied)
520 copied=copied)
486
521
487 overrides = {('phases', 'new-commit'): ctx.phase()}
522 memctx = context.memctx(
488 with ui.configoverride(overrides, source='fix'):
523 repo,
489 memctx = context.memctx(
524 parents=(newp1node, newp2node),
490 repo,
525 text=ctx.description(),
491 parents=(newp1node, newp2node),
526 files=set(ctx.files()) | set(filedata.keys()),
492 text=ctx.description(),
527 filectxfn=filectxfn,
493 files=set(ctx.files()) | set(filedata.keys()),
528 user=ctx.user(),
494 filectxfn=filectxfn,
529 date=ctx.date(),
495 user=ctx.user(),
530 extra=ctx.extra(),
496 date=ctx.date(),
531 branch=ctx.branch(),
497 extra=ctx.extra(),
532 editor=None)
498 branch=ctx.branch(),
533 sucnode = memctx.commit()
499 editor=None)
534 prenode = ctx.node()
500 sucnode = memctx.commit()
535 if prenode == sucnode:
501 prenode = ctx.node()
536 ui.debug('node %s already existed\n' % (ctx.hex()))
502 if prenode == sucnode:
537 else:
503 ui.debug('node %s already existed\n' % (ctx.hex()))
538 replacements[ctx.node()] = sucnode
504 else:
505 replacements[ctx.node()] = sucnode
506
539
507 def getfixers(ui):
540 def getfixers(ui):
508 """Returns a map of configured fixer tools indexed by their names
541 """Returns a map of configured fixer tools indexed by their names
@@ -67,7 +67,7 b' def githelp(ui, repo, *args, **kwargs):'
67
67
68 cmd = args[0]
68 cmd = args[0]
69 if not cmd in gitcommands:
69 if not cmd in gitcommands:
70 raise error.Abort("error: unknown git command %s" % (cmd))
70 raise error.Abort(_("error: unknown git command %s") % (cmd))
71
71
72 ui.pager('githelp')
72 ui.pager('githelp')
73 args = args[1:]
73 args = args[1:]
@@ -90,14 +90,13 b' def parseoptions(ui, cmdoptions, args):'
90 elif ('-' + ex.opt) in ex.msg:
90 elif ('-' + ex.opt) in ex.msg:
91 flag = '-' + ex.opt
91 flag = '-' + ex.opt
92 else:
92 else:
93 raise error.Abort("unknown option %s" % ex.opt)
93 raise error.Abort(_("unknown option %s") % ex.opt)
94 try:
94 try:
95 args.remove(flag)
95 args.remove(flag)
96 except Exception:
96 except Exception:
97 raise error.Abort(
97 msg = _("unknown option '%s' packed with other options")
98 "unknown option {0} packed with other options\n"
98 hint = _("please try passing the option as its own flag: -%s")
99 "Please try passing the option as it's own flag: -{0}" \
99 raise error.Abort(msg % ex.opt, hint=hint % ex.opt)
100 .format(ex.opt))
101
100
102 ui.warn(_("ignoring unknown option %s\n") % flag)
101 ui.warn(_("ignoring unknown option %s\n") % flag)
103
102
@@ -171,7 +170,7 b' def add(ui, repo, *args, **kwargs):'
171 cmd.extend(args)
170 cmd.extend(args)
172 else:
171 else:
173 ui.status(_("note: use hg addremove to remove files that have "
172 ui.status(_("note: use hg addremove to remove files that have "
174 "been deleted.\n\n"))
173 "been deleted\n\n"))
175
174
176 ui.status((bytes(cmd)), "\n")
175 ui.status((bytes(cmd)), "\n")
177
176
@@ -196,7 +195,7 b' def apply(ui, repo, *args, **kwargs):'
196 ui.status((bytes(cmd)), "\n")
195 ui.status((bytes(cmd)), "\n")
197
196
198 def bisect(ui, repo, *args, **kwargs):
197 def bisect(ui, repo, *args, **kwargs):
199 ui.status(_("See 'hg help bisect' for how to use bisect.\n\n"))
198 ui.status(_("see 'hg help bisect' for how to use bisect\n\n"))
200
199
201 def blame(ui, repo, *args, **kwargs):
200 def blame(ui, repo, *args, **kwargs):
202 cmdoptions = [
201 cmdoptions = [
@@ -236,6 +235,8 b' def branch(ui, repo, *args, **kwargs):'
236 # shell command to output the active bookmark for the active
235 # shell command to output the active bookmark for the active
237 # revision
236 # revision
238 old = '`hg log -T"{activebookmark}" -r .`'
237 old = '`hg log -T"{activebookmark}" -r .`'
238 else:
239 raise error.Abort(_('missing newbranch argument'))
239 new = args[0]
240 new = args[0]
240 cmd['-m'] = old
241 cmd['-m'] = old
241 cmd.append(new)
242 cmd.append(new)
@@ -334,7 +335,7 b' def checkout(ui, repo, *args, **kwargs):'
334 cmd = Command('revert')
335 cmd = Command('revert')
335 cmd['--all'] = None
336 cmd['--all'] = None
336 else:
337 else:
337 raise error.Abort("a commit must be specified")
338 raise error.Abort(_("a commit must be specified"))
338
339
339 ui.status((bytes(cmd)), "\n")
340 ui.status((bytes(cmd)), "\n")
340
341
@@ -353,7 +354,7 b' def cherrypick(ui, repo, *args, **kwargs'
353 if opts.get('continue'):
354 if opts.get('continue'):
354 cmd['--continue'] = None
355 cmd['--continue'] = None
355 elif opts.get('abort'):
356 elif opts.get('abort'):
356 ui.status(_("note: hg graft does not have --abort.\n\n"))
357 ui.status(_("note: hg graft does not have --abort\n\n"))
357 return
358 return
358 else:
359 else:
359 cmd.extend(args)
360 cmd.extend(args)
@@ -384,7 +385,7 b' def clone(ui, repo, *args, **kwargs):'
384 args, opts = parseoptions(ui, cmdoptions, args)
385 args, opts = parseoptions(ui, cmdoptions, args)
385
386
386 if len(args) == 0:
387 if len(args) == 0:
387 raise error.Abort("a repository to clone must be specified")
388 raise error.Abort(_("a repository to clone must be specified"))
388
389
389 cmd = Command('clone')
390 cmd = Command('clone')
390 cmd.append(args[0])
391 cmd.append(args[0])
@@ -393,8 +394,8 b' def clone(ui, repo, *args, **kwargs):'
393
394
394 if opts.get('bare'):
395 if opts.get('bare'):
395 cmd['-U'] = None
396 cmd['-U'] = None
396 ui.status(_("note: Mercurial does not have bare clones. " +
397 ui.status(_("note: Mercurial does not have bare clones. "
397 "-U will clone the repo without checking out a commit\n\n"))
398 "-U will clone the repo without checking out a commit\n\n"))
398 elif opts.get('no_checkout'):
399 elif opts.get('no_checkout'):
399 cmd['-U'] = None
400 cmd['-U'] = None
400
401
@@ -436,9 +437,9 b' def commit(ui, repo, *args, **kwargs):'
436 cmd['-m'] = "'%s'" % (opts.get('message'),)
437 cmd['-m'] = "'%s'" % (opts.get('message'),)
437
438
438 if opts.get('all'):
439 if opts.get('all'):
439 ui.status(_("note: Mercurial doesn't have a staging area, " +
440 ui.status(_("note: Mercurial doesn't have a staging area, "
440 "so there is no --all. -A will add and remove files " +
441 "so there is no --all. -A will add and remove files "
441 "for you though.\n\n"))
442 "for you though.\n\n"))
442
443
443 if opts.get('file'):
444 if opts.get('file'):
444 cmd['-l'] = opts.get('file')
445 cmd['-l'] = opts.get('file')
@@ -454,8 +455,8 b' def commit(ui, repo, *args, **kwargs):'
454 ui.status((bytes(cmd)), "\n")
455 ui.status((bytes(cmd)), "\n")
455
456
456 def deprecated(ui, repo, *args, **kwargs):
457 def deprecated(ui, repo, *args, **kwargs):
457 ui.warn(_('This command has been deprecated in the git project, ' +
458 ui.warn(_('this command has been deprecated in the git project, '
458 'thus isn\'t supported by this tool.\n\n'))
459 'thus isn\'t supported by this tool\n\n'))
459
460
460 def diff(ui, repo, *args, **kwargs):
461 def diff(ui, repo, *args, **kwargs):
461 cmdoptions = [
462 cmdoptions = [
@@ -468,8 +469,8 b' def diff(ui, repo, *args, **kwargs):'
468 cmd = Command('diff')
469 cmd = Command('diff')
469
470
470 if opts.get('cached'):
471 if opts.get('cached'):
471 ui.status(_('note: Mercurial has no concept of a staging area, ' +
472 ui.status(_('note: Mercurial has no concept of a staging area, '
472 'so --cached does nothing.\n\n'))
473 'so --cached does nothing\n\n'))
473
474
474 if opts.get('reverse'):
475 if opts.get('reverse'):
475 cmd['--reverse'] = None
476 cmd['--reverse'] = None
@@ -505,10 +506,10 b' def fetch(ui, repo, *args, **kwargs):'
505 if len(args) > 0:
506 if len(args) > 0:
506 cmd.append(args[0])
507 cmd.append(args[0])
507 if len(args) > 1:
508 if len(args) > 1:
508 ui.status(_("note: Mercurial doesn't have refspecs. " +
509 ui.status(_("note: Mercurial doesn't have refspecs. "
509 "-r can be used to specify which commits you want to pull. " +
510 "-r can be used to specify which commits you want to "
510 "-B can be used to specify which bookmark you want to pull." +
511 "pull. -B can be used to specify which bookmark you "
511 "\n\n"))
512 "want to pull.\n\n"))
512 for v in args[1:]:
513 for v in args[1:]:
513 if v in repo._bookmarks:
514 if v in repo._bookmarks:
514 cmd['-B'] = v
515 cmd['-B'] = v
@@ -556,10 +557,10 b' def log(ui, repo, *args, **kwargs):'
556 ('p', 'patch', None, ''),
557 ('p', 'patch', None, ''),
557 ]
558 ]
558 args, opts = parseoptions(ui, cmdoptions, args)
559 args, opts = parseoptions(ui, cmdoptions, args)
559 ui.status(_('note: -v prints the entire commit message like Git does. To ' +
560 ui.status(_('note: -v prints the entire commit message like Git does. To '
560 'print just the first line, drop the -v.\n\n'))
561 'print just the first line, drop the -v.\n\n'))
561 ui.status(_("note: see hg help revset for information on how to filter " +
562 ui.status(_("note: see hg help revset for information on how to filter "
562 "log output.\n\n"))
563 "log output\n\n"))
563
564
564 cmd = Command('log')
565 cmd = Command('log')
565 cmd['-v'] = None
566 cmd['-v'] = None
@@ -578,13 +579,13 b' def log(ui, repo, *args, **kwargs):'
578 if opts.get('pretty') or opts.get('format') or opts.get('oneline'):
579 if opts.get('pretty') or opts.get('format') or opts.get('oneline'):
579 format = opts.get('format', '')
580 format = opts.get('format', '')
580 if 'format:' in format:
581 if 'format:' in format:
581 ui.status(_("note: --format format:??? equates to Mercurial's " +
582 ui.status(_("note: --format format:??? equates to Mercurial's "
582 "--template. See hg help templates for more info.\n\n"))
583 "--template. See hg help templates for more info.\n\n"))
583 cmd['--template'] = '???'
584 cmd['--template'] = '???'
584 else:
585 else:
585 ui.status(_("note: --pretty/format/oneline equate to Mercurial's " +
586 ui.status(_("note: --pretty/format/oneline equate to Mercurial's "
586 "--style or --template. See hg help templates for more info." +
587 "--style or --template. See hg help templates for "
587 "\n\n"))
588 "more info.\n\n"))
588 cmd['--style'] = '???'
589 cmd['--style'] = '???'
589
590
590 if len(args) > 0:
591 if len(args) > 0:
@@ -654,8 +655,8 b' def mergebase(ui, repo, *args, **kwargs)'
654 cmd = Command("log -T '{node}\\n' -r 'ancestor(%s,%s)'"
655 cmd = Command("log -T '{node}\\n' -r 'ancestor(%s,%s)'"
655 % (args[0], args[1]))
656 % (args[0], args[1]))
656
657
657 ui.status(_('NOTE: ancestors() is part of the revset language.\n'),
658 ui.status(_('note: ancestors() is part of the revset language\n'),
658 _("Learn more about revsets with 'hg help revsets'\n\n"))
659 _("(learn more about revsets with 'hg help revsets')\n\n"))
659 ui.status((bytes(cmd)), "\n")
660 ui.status((bytes(cmd)), "\n")
660
661
661 def mergetool(ui, repo, *args, **kwargs):
662 def mergetool(ui, repo, *args, **kwargs):
@@ -697,10 +698,10 b' def pull(ui, repo, *args, **kwargs):'
697 if len(args) > 0:
698 if len(args) > 0:
698 cmd.append(args[0])
699 cmd.append(args[0])
699 if len(args) > 1:
700 if len(args) > 1:
700 ui.status(_("note: Mercurial doesn't have refspecs. " +
701 ui.status(_("note: Mercurial doesn't have refspecs. "
701 "-r can be used to specify which commits you want to pull. " +
702 "-r can be used to specify which commits you want to "
702 "-B can be used to specify which bookmark you want to pull." +
703 "pull. -B can be used to specify which bookmark you "
703 "\n\n"))
704 "want to pull.\n\n"))
704 for v in args[1:]:
705 for v in args[1:]:
705 if v in repo._bookmarks:
706 if v in repo._bookmarks:
706 cmd['-B'] = v
707 cmd['-B'] = v
@@ -721,10 +722,10 b' def push(ui, repo, *args, **kwargs):'
721 if len(args) > 0:
722 if len(args) > 0:
722 cmd.append(args[0])
723 cmd.append(args[0])
723 if len(args) > 1:
724 if len(args) > 1:
724 ui.status(_("note: Mercurial doesn't have refspecs. " +
725 ui.status(_("note: Mercurial doesn't have refspecs. "
725 "-r can be used to specify which commits you want to push. " +
726 "-r can be used to specify which commits you want "
726 "-B can be used to specify which bookmark you want to push." +
727 "to push. -B can be used to specify which bookmark "
727 "\n\n"))
728 "you want to push.\n\n"))
728 for v in args[1:]:
729 for v in args[1:]:
729 if v in repo._bookmarks:
730 if v in repo._bookmarks:
730 cmd['-B'] = v
731 cmd['-B'] = v
@@ -748,12 +749,12 b' def rebase(ui, repo, *args, **kwargs):'
748 args, opts = parseoptions(ui, cmdoptions, args)
749 args, opts = parseoptions(ui, cmdoptions, args)
749
750
750 if opts.get('interactive'):
751 if opts.get('interactive'):
751 ui.status(_("note: hg histedit does not perform a rebase. " +
752 ui.status(_("note: hg histedit does not perform a rebase. "
752 "It just edits history.\n\n"))
753 "It just edits history.\n\n"))
753 cmd = Command('histedit')
754 cmd = Command('histedit')
754 if len(args) > 0:
755 if len(args) > 0:
755 ui.status(_("also note: 'hg histedit' will automatically detect"
756 ui.status(_("also note: 'hg histedit' will automatically detect"
756 " your stack, so no second argument is necessary.\n\n"))
757 " your stack, so no second argument is necessary\n\n"))
757 ui.status((bytes(cmd)), "\n")
758 ui.status((bytes(cmd)), "\n")
758 return
759 return
759
760
@@ -769,12 +770,12 b' def rebase(ui, repo, *args, **kwargs):'
769 cmd['--abort'] = None
770 cmd['--abort'] = None
770
771
771 if opts.get('onto'):
772 if opts.get('onto'):
772 ui.status(_("note: if you're trying to lift a commit off one branch, " +
773 ui.status(_("note: if you're trying to lift a commit off one branch, "
773 "try hg rebase -d <destination commit> -s <commit to be lifted>" +
774 "try hg rebase -d <destination commit> -s <commit to be "
774 "\n\n"))
775 "lifted>\n\n"))
775 cmd['-d'] = convert(opts.get('onto'))
776 cmd['-d'] = convert(opts.get('onto'))
776 if len(args) < 2:
777 if len(args) < 2:
777 raise error.Abort("Expected format: git rebase --onto X Y Z")
778 raise error.Abort(_("expected format: git rebase --onto X Y Z"))
778 cmd['-s'] = "'::%s - ::%s'" % (convert(args[1]), convert(args[0]))
779 cmd['-s'] = "'::%s - ::%s'" % (convert(args[1]), convert(args[0]))
779 else:
780 else:
780 if len(args) == 1:
781 if len(args) == 1:
@@ -799,7 +800,7 b' def reflog(ui, repo, *args, **kwargs):'
799
800
800 ui.status(bytes(cmd), "\n\n")
801 ui.status(bytes(cmd), "\n\n")
801 ui.status(_("note: in hg commits can be deleted from repo but we always"
802 ui.status(_("note: in hg commits can be deleted from repo but we always"
802 " have backups.\n"))
803 " have backups\n"))
803
804
804 def reset(ui, repo, *args, **kwargs):
805 def reset(ui, repo, *args, **kwargs):
805 cmdoptions = [
806 cmdoptions = [
@@ -813,10 +814,10 b' def reset(ui, repo, *args, **kwargs):'
813 hard = opts.get('hard')
814 hard = opts.get('hard')
814
815
815 if opts.get('mixed'):
816 if opts.get('mixed'):
816 ui.status(_('NOTE: --mixed has no meaning since Mercurial has no '
817 ui.status(_('note: --mixed has no meaning since Mercurial has no '
817 'staging area\n\n'))
818 'staging area\n\n'))
818 if opts.get('soft'):
819 if opts.get('soft'):
819 ui.status(_('NOTE: --soft has no meaning since Mercurial has no '
820 ui.status(_('note: --soft has no meaning since Mercurial has no '
820 'staging area\n\n'))
821 'staging area\n\n'))
821
822
822 cmd = Command('update')
823 cmd = Command('update')
@@ -833,7 +834,7 b' def revert(ui, repo, *args, **kwargs):'
833 args, opts = parseoptions(ui, cmdoptions, args)
834 args, opts = parseoptions(ui, cmdoptions, args)
834
835
835 if len(args) > 1:
836 if len(args) > 1:
836 ui.status(_("note: hg backout doesn't support multiple commits at " +
837 ui.status(_("note: hg backout doesn't support multiple commits at "
837 "once\n\n"))
838 "once\n\n"))
838
839
839 cmd = Command('backout')
840 cmd = Command('backout')
@@ -930,8 +931,8 b' def stash(ui, repo, *args, **kwargs):'
930 cmd['--keep'] = None
931 cmd['--keep'] = None
931 elif (action == 'branch' or action == 'show' or action == 'clear'
932 elif (action == 'branch' or action == 'show' or action == 'clear'
932 or action == 'create'):
933 or action == 'create'):
933 ui.status(_("note: Mercurial doesn't have equivalents to the " +
934 ui.status(_("note: Mercurial doesn't have equivalents to the "
934 "git stash branch, show, clear, or create actions.\n\n"))
935 "git stash branch, show, clear, or create actions\n\n"))
935 return
936 return
936 else:
937 else:
937 if len(args) > 0:
938 if len(args) > 0:
@@ -957,9 +958,11 b' def status(ui, repo, *args, **kwargs):'
957 ui.status((bytes(cmd)), "\n")
958 ui.status((bytes(cmd)), "\n")
958
959
959 def svn(ui, repo, *args, **kwargs):
960 def svn(ui, repo, *args, **kwargs):
961 if not args:
962 raise error.Abort(_('missing svn command'))
960 svncmd = args[0]
963 svncmd = args[0]
961 if not svncmd in gitsvncommands:
964 if svncmd not in gitsvncommands:
962 ui.warn(_("error: unknown git svn command %s\n") % (svncmd))
965 raise error.Abort(_('unknown git svn command "%s"') % (svncmd))
963
966
964 args = args[1:]
967 args = args[1:]
965 return gitsvncommands[svncmd](ui, repo, *args, **kwargs)
968 return gitsvncommands[svncmd](ui, repo, *args, **kwargs)
@@ -988,6 +991,9 b' def svnfindrev(ui, repo, *args, **kwargs'
988 ]
991 ]
989 args, opts = parseoptions(ui, cmdoptions, args)
992 args, opts = parseoptions(ui, cmdoptions, args)
990
993
994 if not args:
995 raise error.Abort(_('missing find-rev argument'))
996
991 cmd = Command('log')
997 cmd = Command('log')
992 cmd['-r'] = args[0]
998 cmd['-r'] = args[0]
993
999
@@ -1020,6 +1026,10 b' def tag(ui, repo, *args, **kwargs):'
1020 cmd = Command('tags')
1026 cmd = Command('tags')
1021 else:
1027 else:
1022 cmd = Command('tag')
1028 cmd = Command('tag')
1029
1030 if not args:
1031 raise error.Abort(_('missing tag argument'))
1032
1023 cmd.append(args[0])
1033 cmd.append(args[0])
1024 if len(args) > 1:
1034 if len(args) > 1:
1025 cmd['-r'] = args[1]
1035 cmd['-r'] = args[1]
@@ -9,7 +9,6 b' from __future__ import absolute_import'
9
9
10 import binascii
10 import binascii
11 import os
11 import os
12 import tempfile
13
12
14 from mercurial.i18n import _
13 from mercurial.i18n import _
15 from mercurial import (
14 from mercurial import (
@@ -61,11 +60,11 b' class gpg(object):'
61 sigfile = datafile = None
60 sigfile = datafile = None
62 try:
61 try:
63 # create temporary files
62 # create temporary files
64 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
63 fd, sigfile = pycompat.mkstemp(prefix="hg-gpg-", suffix=".sig")
65 fp = os.fdopen(fd, r'wb')
64 fp = os.fdopen(fd, r'wb')
66 fp.write(sig)
65 fp.write(sig)
67 fp.close()
66 fp.close()
68 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
67 fd, datafile = pycompat.mkstemp(prefix="hg-gpg-", suffix=".txt")
69 fp = os.fdopen(fd, r'wb')
68 fp = os.fdopen(fd, r'wb')
70 fp.write(data)
69 fp.write(data)
71 fp.close()
70 fp.close()
@@ -36,7 +36,6 b' from mercurial.hgweb import ('
36
36
37 from mercurial import (
37 from mercurial import (
38 extensions,
38 extensions,
39 fileset,
40 )
39 )
41
40
42 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
41 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -51,9 +50,8 b' def pygmentize(web, field, fctx, tmpl):'
51 filenameonly = web.configbool('web', 'highlightonlymatchfilename', False)
50 filenameonly = web.configbool('web', 'highlightonlymatchfilename', False)
52
51
53 ctx = fctx.changectx()
52 ctx = fctx.changectx()
54 tree = fileset.parse(expr)
53 m = ctx.matchfileset(expr)
55 mctx = fileset.matchctx(ctx, subset=[fctx.path()], status=None)
54 if m(fctx.path()):
56 if fctx.path() in fileset.getset(mctx, tree):
57 highlight.pygmentize(field, fctx, style, tmpl,
55 highlight.pygmentize(field, fctx, style, tmpl,
58 guessfilenameonly=filenameonly)
56 guessfilenameonly=filenameonly)
59
57
@@ -11,7 +11,7 b''
11 from __future__ import absolute_import
11 from __future__ import absolute_import
12
12
13 from mercurial import demandimport
13 from mercurial import demandimport
14 demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__'])
14 demandimport.IGNORES.update(['pkgutil', 'pkg_resources', '__main__'])
15
15
16 from mercurial import (
16 from mercurial import (
17 encoding,
17 encoding,
@@ -44,7 +44,8 b' SYNTAX_CSS = (\'\\n<link rel="stylesheet" '
44 def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
44 def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
45
45
46 # append a <link ...> to the syntax highlighting css
46 # append a <link ...> to the syntax highlighting css
47 old_header = tmpl.load('header')
47 tmpl.load('header')
48 old_header = tmpl.cache['header']
48 if SYNTAX_CSS not in old_header:
49 if SYNTAX_CSS not in old_header:
49 new_header = old_header + SYNTAX_CSS
50 new_header = old_header + SYNTAX_CSS
50 tmpl.cache['header'] = new_header
51 tmpl.cache['header'] = new_header
@@ -89,7 +90,7 b' def pygmentize(field, fctx, style, tmpl,'
89 coloriter = (s.encode(encoding.encoding, 'replace')
90 coloriter = (s.encode(encoding.encoding, 'replace')
90 for s in colorized.splitlines())
91 for s in colorized.splitlines())
91
92
92 tmpl.filters['colorize'] = lambda x: next(coloriter)
93 tmpl._filters['colorize'] = lambda x: next(coloriter)
93
94
94 oldl = tmpl.cache[field]
95 oldl = tmpl.cache[field]
95 newl = oldl.replace('line|escape', 'line|colorize')
96 newl = oldl.replace('line|escape', 'line|colorize')
@@ -183,7 +183,6 b' unexpectedly::'
183
183
184 from __future__ import absolute_import
184 from __future__ import absolute_import
185
185
186 import errno
187 import os
186 import os
188
187
189 from mercurial.i18n import _
188 from mercurial.i18n import _
@@ -207,6 +206,7 b' from mercurial import ('
207 registrar,
206 registrar,
208 repair,
207 repair,
209 scmutil,
208 scmutil,
209 state as statemod,
210 util,
210 util,
211 )
211 )
212 from mercurial.utils import (
212 from mercurial.utils import (
@@ -304,6 +304,7 b' class histeditstate(object):'
304 self.lock = lock
304 self.lock = lock
305 self.wlock = wlock
305 self.wlock = wlock
306 self.backupfile = None
306 self.backupfile = None
307 self.stateobj = statemod.cmdstate(repo, 'histedit-state')
307 if replacements is None:
308 if replacements is None:
308 self.replacements = []
309 self.replacements = []
309 else:
310 else:
@@ -311,29 +312,33 b' class histeditstate(object):'
311
312
312 def read(self):
313 def read(self):
313 """Load histedit state from disk and set fields appropriately."""
314 """Load histedit state from disk and set fields appropriately."""
314 try:
315 if not self.stateobj.exists():
315 state = self.repo.vfs.read('histedit-state')
316 except IOError as err:
317 if err.errno != errno.ENOENT:
318 raise
319 cmdutil.wrongtooltocontinue(self.repo, _('histedit'))
316 cmdutil.wrongtooltocontinue(self.repo, _('histedit'))
320
317
321 if state.startswith('v1\n'):
318 data = self._read()
319
320 self.parentctxnode = data['parentctxnode']
321 actions = parserules(data['rules'], self)
322 self.actions = actions
323 self.keep = data['keep']
324 self.topmost = data['topmost']
325 self.replacements = data['replacements']
326 self.backupfile = data['backupfile']
327
328 def _read(self):
329 fp = self.repo.vfs.read('histedit-state')
330 if fp.startswith('v1\n'):
322 data = self._load()
331 data = self._load()
323 parentctxnode, rules, keep, topmost, replacements, backupfile = data
332 parentctxnode, rules, keep, topmost, replacements, backupfile = data
324 else:
333 else:
325 data = pickle.loads(state)
334 data = pickle.loads(fp)
326 parentctxnode, rules, keep, topmost, replacements = data
335 parentctxnode, rules, keep, topmost, replacements = data
327 backupfile = None
336 backupfile = None
337 rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules])
328
338
329 self.parentctxnode = parentctxnode
339 return {'parentctxnode': parentctxnode, "rules": rules, "keep": keep,
330 rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules])
340 "topmost": topmost, "replacements": replacements,
331 actions = parserules(rules, self)
341 "backupfile": backupfile}
332 self.actions = actions
333 self.keep = keep
334 self.topmost = topmost
335 self.replacements = replacements
336 self.backupfile = backupfile
337
342
338 def write(self, tr=None):
343 def write(self, tr=None):
339 if tr:
344 if tr:
@@ -779,9 +784,7 b' class fold(histeditaction):'
779
784
780 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
785 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
781 parent = ctx.parents()[0].node()
786 parent = ctx.parents()[0].node()
782 repo.ui.pushbuffer()
787 hg.updaterepo(repo, parent, overwrite=False)
783 hg.update(repo, parent)
784 repo.ui.popbuffer()
785 ### prepare new commit data
788 ### prepare new commit data
786 commitopts = {}
789 commitopts = {}
787 commitopts['user'] = ctx.user()
790 commitopts['user'] = ctx.user()
@@ -812,9 +815,7 b' class fold(histeditaction):'
812 skipprompt=self.skipprompt())
815 skipprompt=self.skipprompt())
813 if n is None:
816 if n is None:
814 return ctx, []
817 return ctx, []
815 repo.ui.pushbuffer()
818 hg.updaterepo(repo, n, overwrite=False)
816 hg.update(repo, n)
817 repo.ui.popbuffer()
818 replacements = [(oldctx.node(), (newnode,)),
819 replacements = [(oldctx.node(), (newnode,)),
819 (ctx.node(), (n,)),
820 (ctx.node(), (n,)),
820 (newnode, (n,)),
821 (newnode, (n,)),
@@ -1109,6 +1110,8 b' def _histedit(ui, repo, state, *freeargs'
1109 fm.startitem()
1110 fm.startitem()
1110 goal = _getgoal(opts)
1111 goal = _getgoal(opts)
1111 revs = opts.get('rev', [])
1112 revs = opts.get('rev', [])
1113 # experimental config: ui.history-editing-backup
1114 nobackup = not ui.configbool('ui', 'history-editing-backup')
1112 rules = opts.get('commands', '')
1115 rules = opts.get('commands', '')
1113 state.keep = opts.get('keep', False)
1116 state.keep = opts.get('keep', False)
1114
1117
@@ -1122,7 +1125,7 b' def _histedit(ui, repo, state, *freeargs'
1122 _edithisteditplan(ui, repo, state, rules)
1125 _edithisteditplan(ui, repo, state, rules)
1123 return
1126 return
1124 elif goal == goalabort:
1127 elif goal == goalabort:
1125 _aborthistedit(ui, repo, state)
1128 _aborthistedit(ui, repo, state, nobackup=nobackup)
1126 return
1129 return
1127 else:
1130 else:
1128 # goal == goalnew
1131 # goal == goalnew
@@ -1149,8 +1152,6 b' def _continuehistedit(ui, repo, state):'
1149 # even if there's an exception before the first transaction serialize.
1152 # even if there's an exception before the first transaction serialize.
1150 state.write()
1153 state.write()
1151
1154
1152 total = len(state.actions)
1153 pos = 0
1154 tr = None
1155 tr = None
1155 # Don't use singletransaction by default since it rolls the entire
1156 # Don't use singletransaction by default since it rolls the entire
1156 # transaction back if an unexpected exception happens (like a
1157 # transaction back if an unexpected exception happens (like a
@@ -1160,13 +1161,13 b' def _continuehistedit(ui, repo, state):'
1160 # and reopen a transaction. For example, if the action executes an
1161 # and reopen a transaction. For example, if the action executes an
1161 # external process it may choose to commit the transaction first.
1162 # external process it may choose to commit the transaction first.
1162 tr = repo.transaction('histedit')
1163 tr = repo.transaction('histedit')
1163 with util.acceptintervention(tr):
1164 progress = ui.makeprogress(_("editing"), unit=_('changes'),
1165 total=len(state.actions))
1166 with progress, util.acceptintervention(tr):
1164 while state.actions:
1167 while state.actions:
1165 state.write(tr=tr)
1168 state.write(tr=tr)
1166 actobj = state.actions[0]
1169 actobj = state.actions[0]
1167 pos += 1
1170 progress.increment(item=actobj.torule())
1168 ui.progress(_("editing"), pos, actobj.torule(),
1169 _('changes'), total)
1170 ui.debug('histedit: processing %s %s\n' % (actobj.verb,\
1171 ui.debug('histedit: processing %s %s\n' % (actobj.verb,\
1171 actobj.torule()))
1172 actobj.torule()))
1172 parentctx, replacement_ = actobj.run()
1173 parentctx, replacement_ = actobj.run()
@@ -1175,13 +1176,10 b' def _continuehistedit(ui, repo, state):'
1175 state.actions.pop(0)
1176 state.actions.pop(0)
1176
1177
1177 state.write()
1178 state.write()
1178 ui.progress(_("editing"), None)
1179
1179
1180 def _finishhistedit(ui, repo, state, fm):
1180 def _finishhistedit(ui, repo, state, fm):
1181 """This action runs when histedit is finishing its session"""
1181 """This action runs when histedit is finishing its session"""
1182 repo.ui.pushbuffer()
1182 hg.updaterepo(repo, state.parentctxnode, overwrite=False)
1183 hg.update(repo, state.parentctxnode, quietempty=True)
1184 repo.ui.popbuffer()
1185
1183
1186 mapping, tmpnodes, created, ntm = processreplacement(state)
1184 mapping, tmpnodes, created, ntm = processreplacement(state)
1187 if mapping:
1185 if mapping:
@@ -1225,7 +1223,7 b' def _finishhistedit(ui, repo, state, fm)'
1225 if repo.vfs.exists('histedit-last-edit.txt'):
1223 if repo.vfs.exists('histedit-last-edit.txt'):
1226 repo.vfs.unlink('histedit-last-edit.txt')
1224 repo.vfs.unlink('histedit-last-edit.txt')
1227
1225
1228 def _aborthistedit(ui, repo, state):
1226 def _aborthistedit(ui, repo, state, nobackup=False):
1229 try:
1227 try:
1230 state.read()
1228 state.read()
1231 __, leafs, tmpnodes, __ = processreplacement(state)
1229 __, leafs, tmpnodes, __ = processreplacement(state)
@@ -1247,8 +1245,8 b' def _aborthistedit(ui, repo, state):'
1247 if repo.unfiltered().revs('parents() and (%n or %ln::)',
1245 if repo.unfiltered().revs('parents() and (%n or %ln::)',
1248 state.parentctxnode, leafs | tmpnodes):
1246 state.parentctxnode, leafs | tmpnodes):
1249 hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
1247 hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
1250 cleanupnode(ui, repo, tmpnodes)
1248 cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
1251 cleanupnode(ui, repo, leafs)
1249 cleanupnode(ui, repo, leafs, nobackup=nobackup)
1252 except Exception:
1250 except Exception:
1253 if state.inprogress():
1251 if state.inprogress():
1254 ui.warn(_('warning: encountered an exception during histedit '
1252 ui.warn(_('warning: encountered an exception during histedit '
@@ -1605,7 +1603,7 b' def movetopmostbookmarks(repo, oldtopmos'
1605 changes.append((name, newtopmost))
1603 changes.append((name, newtopmost))
1606 marks.applychanges(repo, tr, changes)
1604 marks.applychanges(repo, tr, changes)
1607
1605
1608 def cleanupnode(ui, repo, nodes):
1606 def cleanupnode(ui, repo, nodes, nobackup=False):
1609 """strip a group of nodes from the repository
1607 """strip a group of nodes from the repository
1610
1608
1611 The set of node to strip may contains unknown nodes."""
1609 The set of node to strip may contains unknown nodes."""
@@ -1620,7 +1618,8 b' def cleanupnode(ui, repo, nodes):'
1620 nodes = sorted(n for n in nodes if n in nm)
1618 nodes = sorted(n for n in nodes if n in nm)
1621 roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
1619 roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
1622 if roots:
1620 if roots:
1623 repair.strip(ui, repo, roots)
1621 backup = not nobackup
1622 repair.strip(ui, repo, roots, backup=backup)
1624
1623
1625 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
1624 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
1626 if isinstance(nodelist, str):
1625 if isinstance(nodelist, str):
@@ -94,7 +94,6 b' import random'
94 import re
94 import re
95 import socket
95 import socket
96 import subprocess
96 import subprocess
97 import tempfile
98 import time
97 import time
99
98
100 from mercurial.node import (
99 from mercurial.node import (
@@ -565,19 +564,19 b' def _lookupwrap(orig):'
565 if isinstance(localkey, str) and _scratchbranchmatcher(localkey):
564 if isinstance(localkey, str) and _scratchbranchmatcher(localkey):
566 scratchnode = repo.bundlestore.index.getnode(localkey)
565 scratchnode = repo.bundlestore.index.getnode(localkey)
567 if scratchnode:
566 if scratchnode:
568 return "%s %s\n" % (1, scratchnode)
567 return "%d %s\n" % (1, scratchnode)
569 else:
568 else:
570 return "%s %s\n" % (0, 'scratch branch %s not found' % localkey)
569 return "%d %s\n" % (0, 'scratch branch %s not found' % localkey)
571 else:
570 else:
572 try:
571 try:
573 r = hex(repo.lookup(localkey))
572 r = hex(repo.lookup(localkey))
574 return "%s %s\n" % (1, r)
573 return "%d %s\n" % (1, r)
575 except Exception as inst:
574 except Exception as inst:
576 if repo.bundlestore.index.getbundle(localkey):
575 if repo.bundlestore.index.getbundle(localkey):
577 return "%s %s\n" % (1, localkey)
576 return "%d %s\n" % (1, localkey)
578 else:
577 else:
579 r = str(inst)
578 r = stringutil.forcebytestr(inst)
580 return "%s %s\n" % (0, r)
579 return "%d %s\n" % (0, r)
581 return _lookup
580 return _lookup
582
581
583 def _pull(orig, ui, repo, source="default", **opts):
582 def _pull(orig, ui, repo, source="default", **opts):
@@ -912,7 +911,7 b' def storetobundlestore(orig, repo, op, u'
912
911
913 # storing the bundle in the bundlestore
912 # storing the bundle in the bundlestore
914 buf = util.chunkbuffer(bundler.getchunks())
913 buf = util.chunkbuffer(bundler.getchunks())
915 fd, bundlefile = tempfile.mkstemp()
914 fd, bundlefile = pycompat.mkstemp()
916 try:
915 try:
917 try:
916 try:
918 fp = os.fdopen(fd, r'wb')
917 fp = os.fdopen(fd, r'wb')
@@ -998,7 +997,7 b' def processparts(orig, repo, op, unbundl'
998 # If commits were sent, store them
997 # If commits were sent, store them
999 if cgparams:
998 if cgparams:
1000 buf = util.chunkbuffer(bundler.getchunks())
999 buf = util.chunkbuffer(bundler.getchunks())
1001 fd, bundlefile = tempfile.mkstemp()
1000 fd, bundlefile = pycompat.mkstemp()
1002 try:
1001 try:
1003 try:
1002 try:
1004 fp = os.fdopen(fd, r'wb')
1003 fp = os.fdopen(fd, r'wb')
@@ -1110,7 +1109,7 b' def bundle2scratchbranch(op, part):'
1110 bundler.addpart(cgpart)
1109 bundler.addpart(cgpart)
1111 buf = util.chunkbuffer(bundler.getchunks())
1110 buf = util.chunkbuffer(bundler.getchunks())
1112
1111
1113 fd, bundlefile = tempfile.mkstemp()
1112 fd, bundlefile = pycompat.mkstemp()
1114 try:
1113 try:
1115 try:
1114 try:
1116 fp = os.fdopen(fd, r'wb')
1115 fp = os.fdopen(fd, r'wb')
@@ -6,13 +6,13 b''
6 from __future__ import absolute_import
6 from __future__ import absolute_import
7
7
8 import os
8 import os
9 import tempfile
10
9
11 from mercurial.node import hex
10 from mercurial.node import hex
12
11
13 from mercurial import (
12 from mercurial import (
14 error,
13 error,
15 extensions,
14 extensions,
15 pycompat,
16 )
16 )
17
17
18 def isremotebooksenabled(ui):
18 def isremotebooksenabled(ui):
@@ -30,7 +30,7 b' def downloadbundle(repo, unknownbinhead)'
30
30
31 def _makebundlefromraw(data):
31 def _makebundlefromraw(data):
32 fp = None
32 fp = None
33 fd, bundlefile = tempfile.mkstemp()
33 fd, bundlefile = pycompat.mkstemp()
34 try: # guards bundlefile
34 try: # guards bundlefile
35 try: # guards fp
35 try: # guards fp
36 fp = os.fdopen(fd, 'wb')
36 fp = os.fdopen(fd, 'wb')
@@ -120,6 +120,8 b' class externalbundlestore(abstractbundle'
120 def write(self, data):
120 def write(self, data):
121 # Won't work on windows because you can't open file second time without
121 # Won't work on windows because you can't open file second time without
122 # closing it
122 # closing it
123 # TODO: rewrite without str.format() and replace NamedTemporaryFile()
124 # with pycompat.namedtempfile()
123 with NamedTemporaryFile() as temp:
125 with NamedTemporaryFile() as temp:
124 temp.write(data)
126 temp.write(data)
125 temp.flush()
127 temp.flush()
@@ -142,6 +144,8 b' class externalbundlestore(abstractbundle'
142 def read(self, handle):
144 def read(self, handle):
143 # Won't work on windows because you can't open file second time without
145 # Won't work on windows because you can't open file second time without
144 # closing it
146 # closing it
147 # TODO: rewrite without str.format() and replace NamedTemporaryFile()
148 # with pycompat.namedtempfile()
145 with NamedTemporaryFile() as temp:
149 with NamedTemporaryFile() as temp:
146 formatted_args = [arg.format(filename=temp.name, handle=handle)
150 formatted_args = [arg.format(filename=temp.name, handle=handle)
147 for arg in self.get_args]
151 for arg in self.get_args]
@@ -87,7 +87,6 b' from __future__ import absolute_import'
87
87
88 import os
88 import os
89 import re
89 import re
90 import tempfile
91 import weakref
90 import weakref
92
91
93 from mercurial.i18n import _
92 from mercurial.i18n import _
@@ -246,7 +245,7 b' class kwtemplater(object):'
246 @util.propertycache
245 @util.propertycache
247 def escape(self):
246 def escape(self):
248 '''Returns bar-separated and escaped keywords.'''
247 '''Returns bar-separated and escaped keywords.'''
249 return '|'.join(map(re.escape, self.templates.keys()))
248 return '|'.join(map(stringutil.reescape, self.templates.keys()))
250
249
251 @util.propertycache
250 @util.propertycache
252 def rekw(self):
251 def rekw(self):
@@ -434,7 +433,7 b' def demo(ui, repo, *args, **opts):'
434 ui.write('%s = %s\n' % (k, v))
433 ui.write('%s = %s\n' % (k, v))
435
434
436 fn = 'demo.txt'
435 fn = 'demo.txt'
437 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
436 tmpdir = pycompat.mkdtemp('', 'kwdemo.')
438 ui.note(_('creating temporary repository at %s\n') % tmpdir)
437 ui.note(_('creating temporary repository at %s\n') % tmpdir)
439 if repo is None:
438 if repo is None:
440 baseui = ui
439 baseui = ui
@@ -62,9 +62,10 b' class basestore(object):'
62
62
63 at = 0
63 at = 0
64 available = self.exists(set(hash for (_filename, hash) in files))
64 available = self.exists(set(hash for (_filename, hash) in files))
65 progress = ui.makeprogress(_('getting largefiles'), unit=_('files'),
66 total=len(files))
65 for filename, hash in files:
67 for filename, hash in files:
66 ui.progress(_('getting largefiles'), at, unit=_('files'),
68 progress.update(at)
67 total=len(files))
68 at += 1
69 at += 1
69 ui.note(_('getting %s:%s\n') % (filename, hash))
70 ui.note(_('getting %s:%s\n') % (filename, hash))
70
71
@@ -79,7 +80,7 b' class basestore(object):'
79 else:
80 else:
80 missing.append(filename)
81 missing.append(filename)
81
82
82 ui.progress(_('getting largefiles'), None)
83 progress.complete()
83 return (success, missing)
84 return (success, missing)
84
85
85 def _gethash(self, filename, hash):
86 def _gethash(self, filename, hash):
@@ -118,12 +118,14 b' def lfconvert(ui, src, dest, *pats, **op'
118 matcher = None
118 matcher = None
119
119
120 lfiletohash = {}
120 lfiletohash = {}
121 progress = ui.makeprogress(_('converting revisions'),
122 unit=_('revisions'),
123 total=rsrc['tip'].rev())
121 for ctx in ctxs:
124 for ctx in ctxs:
122 ui.progress(_('converting revisions'), ctx.rev(),
125 progress.update(ctx.rev())
123 unit=_('revisions'), total=rsrc['tip'].rev())
124 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
126 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
125 lfiles, normalfiles, matcher, size, lfiletohash)
127 lfiles, normalfiles, matcher, size, lfiletohash)
126 ui.progress(_('converting revisions'), None)
128 progress.complete()
127
129
128 if rdst.wvfs.exists(lfutil.shortname):
130 if rdst.wvfs.exists(lfutil.shortname):
129 rdst.wvfs.rmtree(lfutil.shortname)
131 rdst.wvfs.rmtree(lfutil.shortname)
@@ -368,9 +370,10 b' def uploadlfiles(ui, rsrc, rdst, files):'
368 files = [h for h in files if not retval[h]]
370 files = [h for h in files if not retval[h]]
369 ui.debug("%d largefiles need to be uploaded\n" % len(files))
371 ui.debug("%d largefiles need to be uploaded\n" % len(files))
370
372
373 progress = ui.makeprogress(_('uploading largefiles'), unit=_('files'),
374 total=len(files))
371 for hash in files:
375 for hash in files:
372 ui.progress(_('uploading largefiles'), at, unit=_('files'),
376 progress.update(at)
373 total=len(files))
374 source = lfutil.findfile(rsrc, hash)
377 source = lfutil.findfile(rsrc, hash)
375 if not source:
378 if not source:
376 raise error.Abort(_('largefile %s missing from store'
379 raise error.Abort(_('largefile %s missing from store'
@@ -378,7 +381,7 b' def uploadlfiles(ui, rsrc, rdst, files):'
378 # XXX check for errors here
381 # XXX check for errors here
379 store.put(source, hash)
382 store.put(source, hash)
380 at += 1
383 at += 1
381 ui.progress(_('uploading largefiles'), None)
384 progress.complete()
382
385
383 def verifylfiles(ui, repo, all=False, contents=False):
386 def verifylfiles(ui, repo, all=False, contents=False):
384 '''Verify that every largefile revision in the current changeset
387 '''Verify that every largefile revision in the current changeset
@@ -501,9 +501,10 b' def getlfilestoupdate(oldstandins, newst'
501 return filelist
501 return filelist
502
502
503 def getlfilestoupload(repo, missing, addfunc):
503 def getlfilestoupload(repo, missing, addfunc):
504 progress = repo.ui.makeprogress(_('finding outgoing largefiles'),
505 unit=_('revisions'), total=len(missing))
504 for i, n in enumerate(missing):
506 for i, n in enumerate(missing):
505 repo.ui.progress(_('finding outgoing largefiles'), i,
507 progress.update(i)
506 unit=_('revisions'), total=len(missing))
507 parents = [p for p in repo[n].parents() if p != node.nullid]
508 parents = [p for p in repo[n].parents() if p != node.nullid]
508
509
509 oldlfstatus = repo.lfstatus
510 oldlfstatus = repo.lfstatus
@@ -530,7 +531,7 b' def getlfilestoupload(repo, missing, add'
530 for fn in files:
531 for fn in files:
531 if isstandin(fn) and fn in ctx:
532 if isstandin(fn) and fn in ctx:
532 addfunc(fn, readasstandin(ctx[fn]))
533 addfunc(fn, readasstandin(ctx[fn]))
533 repo.ui.progress(_('finding outgoing largefiles'), None)
534 progress.complete()
534
535
535 def updatestandinsbymatch(repo, match):
536 def updatestandinsbymatch(repo, match):
536 '''Update standins in the working directory according to specified match
537 '''Update standins in the working directory according to specified match
@@ -362,8 +362,10 b' def lfsfileset(mctx, x):'
362 """File that uses LFS storage."""
362 """File that uses LFS storage."""
363 # i18n: "lfs" is a keyword
363 # i18n: "lfs" is a keyword
364 fileset.getargs(x, 0, 0, _("lfs takes no arguments"))
364 fileset.getargs(x, 0, 0, _("lfs takes no arguments"))
365 return [f for f in mctx.subset
365 ctx = mctx.ctx
366 if wrapper.pointerfromctx(mctx.ctx, f, removed=True) is not None]
366 def lfsfilep(f):
367 return wrapper.pointerfromctx(ctx, f, removed=True) is not None
368 return mctx.predicate(lfsfilep, predrepr='<lfs>')
367
369
368 @templatekeyword('lfs_files', requires={'ctx'})
370 @templatekeyword('lfs_files', requires={'ctx'})
369 def lfsfiles(context, mapping):
371 def lfsfiles(context, mapping):
@@ -405,7 +405,8 b' class _gitlfsremote(object):'
405 if len(objects) > 1:
405 if len(objects) > 1:
406 self.ui.note(_('lfs: need to transfer %d objects (%s)\n')
406 self.ui.note(_('lfs: need to transfer %d objects (%s)\n')
407 % (len(objects), util.bytecount(total)))
407 % (len(objects), util.bytecount(total)))
408 self.ui.progress(topic, 0, total=total)
408 progress = self.ui.makeprogress(topic, total=total)
409 progress.update(0)
409 def transfer(chunk):
410 def transfer(chunk):
410 for obj in chunk:
411 for obj in chunk:
411 objsize = obj.get('size', 0)
412 objsize = obj.get('size', 0)
@@ -443,9 +444,9 b' class _gitlfsremote(object):'
443 for _one, oid in oids:
444 for _one, oid in oids:
444 processed += sizes[oid]
445 processed += sizes[oid]
445 blobs += 1
446 blobs += 1
446 self.ui.progress(topic, processed, total=total)
447 progress.update(processed)
447 self.ui.note(_('lfs: processed: %s\n') % oid)
448 self.ui.note(_('lfs: processed: %s\n') % oid)
448 self.ui.progress(topic, pos=None, total=total)
449 progress.complete()
449
450
450 if blobs > 0:
451 if blobs > 0:
451 if action == 'upload':
452 if action == 'upload':
@@ -15,6 +15,9 b' from mercurial import ('
15 error,
15 error,
16 pycompat,
16 pycompat,
17 )
17 )
18 from mercurial.utils import (
19 stringutil,
20 )
18
21
19 class InvalidPointer(error.RevlogError):
22 class InvalidPointer(error.RevlogError):
20 pass
23 pass
@@ -32,7 +35,8 b' class gitlfspointer(dict):'
32 try:
35 try:
33 return cls(l.split(' ', 1) for l in text.splitlines()).validate()
36 return cls(l.split(' ', 1) for l in text.splitlines()).validate()
34 except ValueError: # l.split returns 1 item instead of 2
37 except ValueError: # l.split returns 1 item instead of 2
35 raise InvalidPointer(_('cannot parse git-lfs text: %r') % text)
38 raise InvalidPointer(_('cannot parse git-lfs text: %s')
39 % stringutil.pprint(text))
36
40
37 def serialize(self):
41 def serialize(self):
38 sortkeyfunc = lambda x: (x[0] != 'version', x)
42 sortkeyfunc = lambda x: (x[0] != 'version', x)
@@ -52,7 +56,7 b' class gitlfspointer(dict):'
52 _requiredre = {
56 _requiredre = {
53 'size': re.compile(br'\A[0-9]+\Z'),
57 'size': re.compile(br'\A[0-9]+\Z'),
54 'oid': re.compile(br'\Asha256:[0-9a-f]{64}\Z'),
58 'oid': re.compile(br'\Asha256:[0-9a-f]{64}\Z'),
55 'version': re.compile(br'\A%s\Z' % re.escape(VERSION)),
59 'version': re.compile(br'\A%s\Z' % stringutil.reescape(VERSION)),
56 }
60 }
57
61
58 def validate(self):
62 def validate(self):
@@ -61,15 +65,19 b' class gitlfspointer(dict):'
61 for k, v in self.iteritems():
65 for k, v in self.iteritems():
62 if k in self._requiredre:
66 if k in self._requiredre:
63 if not self._requiredre[k].match(v):
67 if not self._requiredre[k].match(v):
64 raise InvalidPointer(_('unexpected value: %s=%r') % (k, v))
68 raise InvalidPointer(
69 _('unexpected lfs pointer value: %s=%s')
70 % (k, stringutil.pprint(v)))
65 requiredcount += 1
71 requiredcount += 1
66 elif not self._keyre.match(k):
72 elif not self._keyre.match(k):
67 raise InvalidPointer(_('unexpected key: %s') % k)
73 raise InvalidPointer(_('unexpected lfs pointer key: %s') % k)
68 if not self._valuere.match(v):
74 if not self._valuere.match(v):
69 raise InvalidPointer(_('unexpected value: %s=%r') % (k, v))
75 raise InvalidPointer(_('unexpected lfs pointer value: %s=%s')
76 % (k, stringutil.pprint(v)))
70 if len(self._requiredre) != requiredcount:
77 if len(self._requiredre) != requiredcount:
71 miss = sorted(set(self._requiredre.keys()).difference(self.keys()))
78 miss = sorted(set(self._requiredre.keys()).difference(self.keys()))
72 raise InvalidPointer(_('missed keys: %s') % ', '.join(miss))
79 raise InvalidPointer(_('missing lfs pointer keys: %s')
80 % ', '.join(miss))
73 return self
81 return self
74
82
75 deserialize = gitlfspointer.deserialize
83 deserialize = gitlfspointer.deserialize
@@ -492,7 +492,8 b' class queue(object):'
492 n, name = entry
492 n, name = entry
493 yield statusentry(bin(n), name)
493 yield statusentry(bin(n), name)
494 elif l.strip():
494 elif l.strip():
495 self.ui.warn(_('malformated mq status line: %s\n') % entry)
495 self.ui.warn(_('malformated mq status line: %s\n') %
496 stringutil.pprint(entry))
496 # else we ignore empty lines
497 # else we ignore empty lines
497 try:
498 try:
498 lines = self.opener.read(self.statuspath).splitlines()
499 lines = self.opener.read(self.statuspath).splitlines()
@@ -2872,7 +2873,7 b' def guard(ui, repo, *args, **opts):'
2872 patch = None
2873 patch = None
2873 args = list(args)
2874 args = list(args)
2874 if opts.get(r'list'):
2875 if opts.get(r'list'):
2875 if args or opts.get('none'):
2876 if args or opts.get(r'none'):
2876 raise error.Abort(_('cannot mix -l/--list with options or '
2877 raise error.Abort(_('cannot mix -l/--list with options or '
2877 'arguments'))
2878 'arguments'))
2878 for i in xrange(len(q.series)):
2879 for i in xrange(len(q.series)):
@@ -2886,7 +2887,7 b' def guard(ui, repo, *args, **opts):'
2886 patch = args.pop(0)
2887 patch = args.pop(0)
2887 if patch is None:
2888 if patch is None:
2888 raise error.Abort(_('no patch to work with'))
2889 raise error.Abort(_('no patch to work with'))
2889 if args or opts.get('none'):
2890 if args or opts.get(r'none'):
2890 idx = q.findseries(patch)
2891 idx = q.findseries(patch)
2891 if idx is None:
2892 if idx is None:
2892 raise error.Abort(_('no patch named %s') % patch)
2893 raise error.Abort(_('no patch named %s') % patch)
@@ -28,8 +28,6 b' from . import ('
28 narrowchangegroup,
28 narrowchangegroup,
29 narrowcommands,
29 narrowcommands,
30 narrowcopies,
30 narrowcopies,
31 narrowdirstate,
32 narrowmerge,
33 narrowpatch,
31 narrowpatch,
34 narrowrepo,
32 narrowrepo,
35 narrowrevlog,
33 narrowrevlog,
@@ -64,7 +62,6 b' def uisetup(ui):'
64 localrepo.featuresetupfuncs.add(featuresetup)
62 localrepo.featuresetupfuncs.add(featuresetup)
65 narrowrevlog.setup()
63 narrowrevlog.setup()
66 narrowbundle2.setup()
64 narrowbundle2.setup()
67 narrowmerge.setup()
68 narrowcommands.setup()
65 narrowcommands.setup()
69 narrowchangegroup.setup()
66 narrowchangegroup.setup()
70 narrowwirepeer.uisetup()
67 narrowwirepeer.uisetup()
@@ -74,10 +71,9 b' def reposetup(ui, repo):'
74 if not repo.local():
71 if not repo.local():
75 return
72 return
76
73
77 narrowrepo.wraprepo(repo)
78 if changegroup.NARROW_REQUIREMENT in repo.requirements:
74 if changegroup.NARROW_REQUIREMENT in repo.requirements:
75 narrowrepo.wraprepo(repo)
79 narrowcopies.setup(repo)
76 narrowcopies.setup(repo)
80 narrowdirstate.setup(repo)
81 narrowpatch.setup(repo)
77 narrowpatch.setup(repo)
82 narrowwirepeer.reposetup(repo)
78 narrowwirepeer.reposetup(repo)
83
79
@@ -408,6 +408,8 b' def _handlechangespec(op, inpart):'
408 topic='widen')
408 topic='widen')
409 repo._bookmarks = bmstore
409 repo._bookmarks = bmstore
410 if chgrpfile:
410 if chgrpfile:
411 op._widen_uninterr = repo.ui.uninterruptable()
412 op._widen_uninterr.__enter__()
411 # presence of _widen_bundle attribute activates widen handler later
413 # presence of _widen_bundle attribute activates widen handler later
412 op._widen_bundle = chgrpfile
414 op._widen_bundle = chgrpfile
413 # Set the new narrowspec if we're widening. The setnewnarrowpats() method
415 # Set the new narrowspec if we're widening. The setnewnarrowpats() method
@@ -455,6 +457,7 b' def handlechangegroup_widen(op, inpart):'
455 (undovfs.join(undofile), stringutil.forcebytestr(e)))
457 (undovfs.join(undofile), stringutil.forcebytestr(e)))
456
458
457 # Remove partial backup only if there were no exceptions
459 # Remove partial backup only if there were no exceptions
460 op._widen_uninterr.__exit__(None, None, None)
458 vfs.unlink(chgrpfile)
461 vfs.unlink(chgrpfile)
459
462
460 def setup():
463 def setup():
@@ -203,50 +203,51 b' def _narrow(ui, repo, remote, commoninc,'
203 hint=_('use --force-delete-local-changes to '
203 hint=_('use --force-delete-local-changes to '
204 'ignore'))
204 'ignore'))
205
205
206 if revstostrip:
206 with ui.uninterruptable():
207 tostrip = [unfi.changelog.node(r) for r in revstostrip]
207 if revstostrip:
208 if repo['.'].node() in tostrip:
208 tostrip = [unfi.changelog.node(r) for r in revstostrip]
209 # stripping working copy, so move to a different commit first
209 if repo['.'].node() in tostrip:
210 urev = max(repo.revs('(::%n) - %ln + null',
210 # stripping working copy, so move to a different commit first
211 repo['.'].node(), visibletostrip))
211 urev = max(repo.revs('(::%n) - %ln + null',
212 hg.clean(repo, urev)
212 repo['.'].node(), visibletostrip))
213 repair.strip(ui, unfi, tostrip, topic='narrow')
213 hg.clean(repo, urev)
214 repair.strip(ui, unfi, tostrip, topic='narrow')
214
215
215 todelete = []
216 todelete = []
216 for f, f2, size in repo.store.datafiles():
217 for f, f2, size in repo.store.datafiles():
217 if f.startswith('data/'):
218 if f.startswith('data/'):
218 file = f[5:-2]
219 file = f[5:-2]
219 if not newmatch(file):
220 if not newmatch(file):
220 todelete.append(f)
221 todelete.append(f)
221 elif f.startswith('meta/'):
222 elif f.startswith('meta/'):
222 dir = f[5:-13]
223 dir = f[5:-13]
223 dirs = ['.'] + sorted(util.dirs({dir})) + [dir]
224 dirs = ['.'] + sorted(util.dirs({dir})) + [dir]
224 include = True
225 include = True
225 for d in dirs:
226 for d in dirs:
226 visit = newmatch.visitdir(d)
227 visit = newmatch.visitdir(d)
227 if not visit:
228 if not visit:
228 include = False
229 include = False
229 break
230 break
230 if visit == 'all':
231 if visit == 'all':
231 break
232 break
232 if not include:
233 if not include:
233 todelete.append(f)
234 todelete.append(f)
234
235
235 repo.destroying()
236 repo.destroying()
236
237
237 with repo.transaction("narrowing"):
238 with repo.transaction("narrowing"):
238 for f in todelete:
239 for f in todelete:
239 ui.status(_('deleting %s\n') % f)
240 ui.status(_('deleting %s\n') % f)
240 util.unlinkpath(repo.svfs.join(f))
241 util.unlinkpath(repo.svfs.join(f))
241 repo.store.markremoved(f)
242 repo.store.markremoved(f)
242
243
243 for f in repo.dirstate:
244 for f in repo.dirstate:
244 if not newmatch(f):
245 if not newmatch(f):
245 repo.dirstate.drop(f)
246 repo.dirstate.drop(f)
246 repo.wvfs.unlinkpath(f)
247 repo.wvfs.unlinkpath(f)
247 repo.setnarrowpats(newincludes, newexcludes)
248 repo.setnarrowpats(newincludes, newexcludes)
248
249
249 repo.destroyed()
250 repo.destroyed()
250
251
251 def _widen(ui, repo, remote, commoninc, newincludes, newexcludes):
252 def _widen(ui, repo, remote, commoninc, newincludes, newexcludes):
252 newmatch = narrowspec.match(repo.root, newincludes, newexcludes)
253 newmatch = narrowspec.match(repo.root, newincludes, newexcludes)
@@ -269,28 +270,29 b' def _widen(ui, repo, remote, commoninc, '
269 repo.setnarrowpats(newincludes, newexcludes)
270 repo.setnarrowpats(newincludes, newexcludes)
270 repo.setnewnarrowpats = setnewnarrowpats
271 repo.setnewnarrowpats = setnewnarrowpats
271
272
272 ds = repo.dirstate
273 with ui.uninterruptable():
273 p1, p2 = ds.p1(), ds.p2()
274 ds = repo.dirstate
274 with ds.parentchange():
275 p1, p2 = ds.p1(), ds.p2()
275 ds.setparents(node.nullid, node.nullid)
276 with ds.parentchange():
276 common = commoninc[0]
277 ds.setparents(node.nullid, node.nullid)
277 with wrappedextraprepare:
278 common = commoninc[0]
278 exchange.pull(repo, remote, heads=common)
279 with wrappedextraprepare:
279 with ds.parentchange():
280 exchange.pull(repo, remote, heads=common)
280 ds.setparents(p1, p2)
281 with ds.parentchange():
282 ds.setparents(p1, p2)
281
283
282 actions = {k: [] for k in 'a am f g cd dc r dm dg m e k p pr'.split()}
284 actions = {k: [] for k in 'a am f g cd dc r dm dg m e k p pr'.split()}
283 addgaction = actions['g'].append
285 addgaction = actions['g'].append
284
286
285 mf = repo['.'].manifest().matches(newmatch)
287 mf = repo['.'].manifest().matches(newmatch)
286 for f, fn in mf.iteritems():
288 for f, fn in mf.iteritems():
287 if f not in repo.dirstate:
289 if f not in repo.dirstate:
288 addgaction((f, (mf.flags(f), False),
290 addgaction((f, (mf.flags(f), False),
289 "add from widened narrow clone"))
291 "add from widened narrow clone"))
290
292
291 merge.applyupdates(repo, actions, wctx=repo[None],
293 merge.applyupdates(repo, actions, wctx=repo[None],
292 mctx=repo['.'], overwrite=False)
294 mctx=repo['.'], overwrite=False)
293 merge.recordupdates(repo, actions, branchmerge=False)
295 merge.recordupdates(repo, actions, branchmerge=False)
294
296
295 # TODO(rdamazio): Make new matcher format and update description
297 # TODO(rdamazio): Make new matcher format and update description
296 @command('tracked',
298 @command('tracked',
@@ -9,74 +9,91 b' from __future__ import absolute_import'
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import (
11 from mercurial import (
12 dirstate,
13 error,
12 error,
14 extensions,
15 match as matchmod,
13 match as matchmod,
16 narrowspec,
14 narrowspec,
17 util as hgutil,
15 util as hgutil,
18 )
16 )
19
17
20 def setup(repo):
18 def wrapdirstate(repo, dirstate):
21 """Add narrow spec dirstate ignore, block changes outside narrow spec."""
19 """Add narrow spec dirstate ignore, block changes outside narrow spec."""
22
20
23 def walk(orig, self, match, subrepos, unknown, ignored, full=True,
21 def _editfunc(fn):
24 narrowonly=True):
22 def _wrapper(self, *args):
25 if narrowonly:
26 # hack to not exclude explicitly-specified paths so that they can
27 # be warned later on e.g. dirstate.add()
28 em = matchmod.exact(match._root, match._cwd, match.files())
29 nm = matchmod.unionmatcher([repo.narrowmatch(), em])
30 match = matchmod.intersectmatchers(match, nm)
31 return orig(self, match, subrepos, unknown, ignored, full)
32
33 extensions.wrapfunction(dirstate.dirstate, 'walk', walk)
34
35 # Prevent adding files that are outside the sparse checkout
36 editfuncs = ['normal', 'add', 'normallookup', 'copy', 'remove', 'merge']
37 for func in editfuncs:
38 def _wrapper(orig, self, *args):
39 dirstate = repo.dirstate
23 dirstate = repo.dirstate
40 narrowmatch = repo.narrowmatch()
24 narrowmatch = repo.narrowmatch()
41 for f in args:
25 for f in args:
42 if f is not None and not narrowmatch(f) and f not in dirstate:
26 if f is not None and not narrowmatch(f) and f not in dirstate:
43 raise error.Abort(_("cannot track '%s' - it is outside " +
27 raise error.Abort(_("cannot track '%s' - it is outside " +
44 "the narrow clone") % f)
28 "the narrow clone") % f)
45 return orig(self, *args)
29 return fn(self, *args)
46 extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
30 return _wrapper
47
48 def filterrebuild(orig, self, parent, allfiles, changedfiles=None):
49 if changedfiles is None:
50 # Rebuilding entire dirstate, let's filter allfiles to match the
51 # narrowspec.
52 allfiles = [f for f in allfiles if repo.narrowmatch()(f)]
53 orig(self, parent, allfiles, changedfiles)
54
55 extensions.wrapfunction(dirstate.dirstate, 'rebuild', filterrebuild)
56
31
57 def _narrowbackupname(backupname):
32 def _narrowbackupname(backupname):
58 assert 'dirstate' in backupname
33 assert 'dirstate' in backupname
59 return backupname.replace('dirstate', narrowspec.FILENAME)
34 return backupname.replace('dirstate', narrowspec.FILENAME)
60
35
61 def restorebackup(orig, self, tr, backupname):
36 class narrowdirstate(dirstate.__class__):
62 self._opener.rename(_narrowbackupname(backupname), narrowspec.FILENAME,
37 def walk(self, match, subrepos, unknown, ignored, full=True,
63 checkambig=True)
38 narrowonly=True):
64 orig(self, tr, backupname)
39 if narrowonly:
40 # hack to not exclude explicitly-specified paths so that they
41 # can be warned later on e.g. dirstate.add()
42 em = matchmod.exact(match._root, match._cwd, match.files())
43 nm = matchmod.unionmatcher([repo.narrowmatch(), em])
44 match = matchmod.intersectmatchers(match, nm)
45 return super(narrowdirstate, self).walk(match, subrepos, unknown,
46 ignored, full)
65
47
66 extensions.wrapfunction(dirstate.dirstate, 'restorebackup', restorebackup)
48 # Prevent adding/editing/copying/deleting files that are outside the
49 # sparse checkout
50 @_editfunc
51 def normal(self, *args):
52 return super(narrowdirstate, self).normal(*args)
67
53
68 def savebackup(orig, self, tr, backupname):
54 @_editfunc
69 orig(self, tr, backupname)
55 def add(self, *args):
56 return super(narrowdirstate, self).add(*args)
57
58 @_editfunc
59 def normallookup(self, *args):
60 return super(narrowdirstate, self).normallookup(*args)
61
62 @_editfunc
63 def copy(self, *args):
64 return super(narrowdirstate, self).copy(*args)
70
65
71 narrowbackupname = _narrowbackupname(backupname)
66 @_editfunc
72 self._opener.tryunlink(narrowbackupname)
67 def remove(self, *args):
73 hgutil.copyfile(self._opener.join(narrowspec.FILENAME),
68 return super(narrowdirstate, self).remove(*args)
74 self._opener.join(narrowbackupname), hardlink=True)
69
70 @_editfunc
71 def merge(self, *args):
72 return super(narrowdirstate, self).merge(*args)
73
74 def rebuild(self, parent, allfiles, changedfiles=None):
75 if changedfiles is None:
76 # Rebuilding entire dirstate, let's filter allfiles to match the
77 # narrowspec.
78 allfiles = [f for f in allfiles if repo.narrowmatch()(f)]
79 super(narrowdirstate, self).rebuild(parent, allfiles, changedfiles)
75
80
76 extensions.wrapfunction(dirstate.dirstate, 'savebackup', savebackup)
81 def restorebackup(self, tr, backupname):
82 self._opener.rename(_narrowbackupname(backupname),
83 narrowspec.FILENAME, checkambig=True)
84 super(narrowdirstate, self).restorebackup(tr, backupname)
85
86 def savebackup(self, tr, backupname):
87 super(narrowdirstate, self).savebackup(tr, backupname)
77
88
78 def clearbackup(orig, self, tr, backupname):
89 narrowbackupname = _narrowbackupname(backupname)
79 orig(self, tr, backupname)
90 self._opener.tryunlink(narrowbackupname)
80 self._opener.unlink(_narrowbackupname(backupname))
91 hgutil.copyfile(self._opener.join(narrowspec.FILENAME),
92 self._opener.join(narrowbackupname), hardlink=True)
81
93
82 extensions.wrapfunction(dirstate.dirstate, 'clearbackup', clearbackup)
94 def clearbackup(self, tr, backupname):
95 super(narrowdirstate, self).clearbackup(tr, backupname)
96 self._opener.unlink(_narrowbackupname(backupname))
97
98 dirstate.__class__ = narrowdirstate
99 return dirstate
@@ -15,6 +15,7 b' from mercurial import ('
15 )
15 )
16
16
17 from . import (
17 from . import (
18 narrowdirstate,
18 narrowrevlog,
19 narrowrevlog,
19 )
20 )
20
21
@@ -62,4 +63,8 b' def wraprepo(repo):'
62 return scmutil.status(modified, added, removed, deleted, unknown,
63 return scmutil.status(modified, added, removed, deleted, unknown,
63 ignored, clean)
64 ignored, clean)
64
65
66 def _makedirstate(self):
67 dirstate = super(narrowrepository, self)._makedirstate()
68 return narrowdirstate.wrapdirstate(self, dirstate)
69
65 repo.__class__ = narrowrepository
70 repo.__class__ = narrowrepository
@@ -113,6 +113,9 b' notify.maxsubject'
113 notify.diffstat
113 notify.diffstat
114 Set to True to include a diffstat before diff content. Default: True.
114 Set to True to include a diffstat before diff content. Default: True.
115
115
116 notify.showfunc
117 If set, override ``diff.showfunc`` for the diff content. Default: None.
118
116 notify.merge
119 notify.merge
117 If True, send notifications for merge changesets. Default: True.
120 If True, send notifications for merge changesets. Default: True.
118
121
@@ -206,6 +209,9 b" configitem('notify', 'outgoing',"
206 configitem('notify', 'sources',
209 configitem('notify', 'sources',
207 default='serve',
210 default='serve',
208 )
211 )
212 configitem('notify', 'showfunc',
213 default=None,
214 )
209 configitem('notify', 'strip',
215 configitem('notify', 'strip',
210 default=0,
216 default=0,
211 )
217 )
@@ -260,6 +266,9 b' class notifier(object):'
260 self.charsets = mail._charsets(self.ui)
266 self.charsets = mail._charsets(self.ui)
261 self.subs = self.subscribers()
267 self.subs = self.subscribers()
262 self.merge = self.ui.configbool('notify', 'merge')
268 self.merge = self.ui.configbool('notify', 'merge')
269 self.showfunc = self.ui.configbool('notify', 'showfunc')
270 if self.showfunc is None:
271 self.showfunc = self.ui.configbool('diff', 'showfunc')
263
272
264 mapfile = None
273 mapfile = None
265 template = (self.ui.config('notify', hooktype) or
274 template = (self.ui.config('notify', hooktype) or
@@ -420,8 +429,9 b' class notifier(object):'
420 ref = ref.node()
429 ref = ref.node()
421 else:
430 else:
422 ref = ctx.node()
431 ref = ctx.node()
423 chunks = patch.diff(self.repo, prev, ref,
432 diffopts = patch.diffallopts(self.ui)
424 opts=patch.diffallopts(self.ui))
433 diffopts.showfunc = self.showfunc
434 chunks = patch.diff(self.repo, prev, ref, opts=diffopts)
425 difflines = ''.join(chunks).splitlines()
435 difflines = ''.join(chunks).splitlines()
426
436
427 if self.ui.configbool('notify', 'diffstat'):
437 if self.ui.configbool('notify', 'diffstat'):
@@ -75,11 +75,12 b' from __future__ import absolute_import'
75
75
76 import email as emailmod
76 import email as emailmod
77 import email.generator as emailgen
77 import email.generator as emailgen
78 import email.mime.base as emimebase
79 import email.mime.multipart as emimemultipart
78 import email.utils as eutil
80 import email.utils as eutil
79 import errno
81 import errno
80 import os
82 import os
81 import socket
83 import socket
82 import tempfile
83
84
84 from mercurial.i18n import _
85 from mercurial.i18n import _
85 from mercurial import (
86 from mercurial import (
@@ -94,7 +95,6 b' from mercurial import ('
94 patch,
95 patch,
95 pycompat,
96 pycompat,
96 registrar,
97 registrar,
97 repair,
98 scmutil,
98 scmutil,
99 templater,
99 templater,
100 util,
100 util,
@@ -256,7 +256,7 b' def makepatch(ui, repo, rev, patchlines,'
256 body += '\n'.join(patchlines)
256 body += '\n'.join(patchlines)
257
257
258 if addattachment:
258 if addattachment:
259 msg = emailmod.MIMEMultipart.MIMEMultipart()
259 msg = emimemultipart.MIMEMultipart()
260 if body:
260 if body:
261 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
261 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
262 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
262 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
@@ -318,7 +318,7 b' def _getbundle(repo, dest, **opts):'
318 The bundle is a returned as a single in-memory binary blob.
318 The bundle is a returned as a single in-memory binary blob.
319 """
319 """
320 ui = repo.ui
320 ui = repo.ui
321 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
321 tmpdir = pycompat.mkdtemp(prefix='hg-email-bundle-')
322 tmpfn = os.path.join(tmpdir, 'bundle')
322 tmpfn = os.path.join(tmpdir, 'bundle')
323 btype = ui.config('patchbomb', 'bundletype')
323 btype = ui.config('patchbomb', 'bundletype')
324 if btype:
324 if btype:
@@ -367,10 +367,10 b' def _getbundlemsgs(repo, sender, bundle,'
367 or prompt(ui, 'Subject:', 'A bundle for your repository'))
367 or prompt(ui, 'Subject:', 'A bundle for your repository'))
368
368
369 body = _getdescription(repo, '', sender, **opts)
369 body = _getdescription(repo, '', sender, **opts)
370 msg = emailmod.MIMEMultipart.MIMEMultipart()
370 msg = emimemultipart.MIMEMultipart()
371 if body:
371 if body:
372 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(r'test')))
372 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get(r'test')))
373 datapart = emailmod.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
373 datapart = emimebase.MIMEBase('application', 'x-mercurial-bundle')
374 datapart.set_payload(bundle)
374 datapart.set_payload(bundle)
375 bundlename = '%s.hg' % opts.get(r'bundlename', 'bundle')
375 bundlename = '%s.hg' % opts.get(r'bundlename', 'bundle')
376 datapart.add_header('Content-Disposition', 'attachment',
376 datapart.add_header('Content-Disposition', 'attachment',
@@ -624,7 +624,7 b' def email(ui, repo, *revs, **opts):'
624 elif bookmark:
624 elif bookmark:
625 if bookmark not in repo._bookmarks:
625 if bookmark not in repo._bookmarks:
626 raise error.Abort(_("bookmark '%s' not found") % bookmark)
626 raise error.Abort(_("bookmark '%s' not found") % bookmark)
627 revs = repair.stripbmrevset(repo, bookmark)
627 revs = scmutil.bookmarkrevs(repo, bookmark)
628
628
629 revs = scmutil.revrange(repo, revs)
629 revs = scmutil.revrange(repo, revs)
630 if outgoing:
630 if outgoing:
@@ -753,6 +753,7 b' def email(ui, repo, *revs, **opts):'
753 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
753 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
754 sendmail = None
754 sendmail = None
755 firstpatch = None
755 firstpatch = None
756 progress = ui.makeprogress(_('sending'), unit=_('emails'), total=len(msgs))
756 for i, (m, subj, ds) in enumerate(msgs):
757 for i, (m, subj, ds) in enumerate(msgs):
757 try:
758 try:
758 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
759 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
@@ -793,8 +794,7 b' def email(ui, repo, *revs, **opts):'
793 if not sendmail:
794 if not sendmail:
794 sendmail = mail.connect(ui, mbox=mbox)
795 sendmail = mail.connect(ui, mbox=mbox)
795 ui.status(_('sending '), subj, ' ...\n')
796 ui.status(_('sending '), subj, ' ...\n')
796 ui.progress(_('sending'), i, item=subj, total=len(msgs),
797 progress.update(i, item=subj)
797 unit=_('emails'))
798 if not mbox:
798 if not mbox:
799 # Exim does not remove the Bcc field
799 # Exim does not remove the Bcc field
800 del m['Bcc']
800 del m['Bcc']
@@ -803,5 +803,4 b' def email(ui, repo, *revs, **opts):'
803 generator.flatten(m, 0)
803 generator.flatten(m, 0)
804 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
804 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
805
805
806 ui.progress(_('writing'), None)
806 progress.complete()
807 ui.progress(_('sending'), None)
@@ -34,7 +34,6 b' from mercurial import ('
34 error,
34 error,
35 extensions,
35 extensions,
36 hg,
36 hg,
37 lock,
38 merge as mergemod,
37 merge as mergemod,
39 mergeutil,
38 mergeutil,
40 obsolete,
39 obsolete,
@@ -48,11 +47,10 b' from mercurial import ('
48 revsetlang,
47 revsetlang,
49 scmutil,
48 scmutil,
50 smartset,
49 smartset,
50 state as statemod,
51 util,
51 util,
52 )
52 )
53
53
54 release = lock.release
55
56 # The following constants are used throughout the rebase module. The ordering of
54 # The following constants are used throughout the rebase module. The ordering of
57 # their values must be maintained.
55 # their values must be maintained.
58
56
@@ -184,6 +182,7 b' class rebaseruntime(object):'
184 self.obsoletenotrebased = {}
182 self.obsoletenotrebased = {}
185 self.obsoletewithoutsuccessorindestination = set()
183 self.obsoletewithoutsuccessorindestination = set()
186 self.inmemory = inmemory
184 self.inmemory = inmemory
185 self.stateobj = statemod.cmdstate(repo, 'rebasestate')
187
186
188 @property
187 @property
189 def repo(self):
188 def repo(self):
@@ -225,40 +224,55 b' class rebaseruntime(object):'
225
224
226 def restorestatus(self):
225 def restorestatus(self):
227 """Restore a previously stored status"""
226 """Restore a previously stored status"""
227 if not self.stateobj.exists():
228 cmdutil.wrongtooltocontinue(self.repo, _('rebase'))
229
230 data = self._read()
231 self.repo.ui.debug('rebase status resumed\n')
232
233 self.originalwd = data['originalwd']
234 self.destmap = data['destmap']
235 self.state = data['state']
236 self.skipped = data['skipped']
237 self.collapsef = data['collapse']
238 self.keepf = data['keep']
239 self.keepbranchesf = data['keepbranches']
240 self.external = data['external']
241 self.activebookmark = data['activebookmark']
242
243 def _read(self):
228 self.prepared = True
244 self.prepared = True
229 repo = self.repo
245 repo = self.repo
230 assert repo.filtername is None
246 assert repo.filtername is None
231 keepbranches = None
247 data = {'keepbranches': None, 'collapse': None, 'activebookmark': None,
248 'external': nullrev, 'keep': None, 'originalwd': None}
232 legacydest = None
249 legacydest = None
233 collapse = False
234 external = nullrev
235 activebookmark = None
236 state = {}
250 state = {}
237 destmap = {}
251 destmap = {}
238
252
239 try:
253 if True:
240 f = repo.vfs("rebasestate")
254 f = repo.vfs("rebasestate")
241 for i, l in enumerate(f.read().splitlines()):
255 for i, l in enumerate(f.read().splitlines()):
242 if i == 0:
256 if i == 0:
243 originalwd = repo[l].rev()
257 data['originalwd'] = repo[l].rev()
244 elif i == 1:
258 elif i == 1:
245 # this line should be empty in newer version. but legacy
259 # this line should be empty in newer version. but legacy
246 # clients may still use it
260 # clients may still use it
247 if l:
261 if l:
248 legacydest = repo[l].rev()
262 legacydest = repo[l].rev()
249 elif i == 2:
263 elif i == 2:
250 external = repo[l].rev()
264 data['external'] = repo[l].rev()
251 elif i == 3:
265 elif i == 3:
252 collapse = bool(int(l))
266 data['collapse'] = bool(int(l))
253 elif i == 4:
267 elif i == 4:
254 keep = bool(int(l))
268 data['keep'] = bool(int(l))
255 elif i == 5:
269 elif i == 5:
256 keepbranches = bool(int(l))
270 data['keepbranches'] = bool(int(l))
257 elif i == 6 and not (len(l) == 81 and ':' in l):
271 elif i == 6 and not (len(l) == 81 and ':' in l):
258 # line 6 is a recent addition, so for backwards
272 # line 6 is a recent addition, so for backwards
259 # compatibility check that the line doesn't look like the
273 # compatibility check that the line doesn't look like the
260 # oldrev:newrev lines
274 # oldrev:newrev lines
261 activebookmark = l
275 data['activebookmark'] = l
262 else:
276 else:
263 args = l.split(':')
277 args = l.split(':')
264 oldrev = repo[args[0]].rev()
278 oldrev = repo[args[0]].rev()
@@ -276,35 +290,24 b' class rebaseruntime(object):'
276 else:
290 else:
277 state[oldrev] = repo[newrev].rev()
291 state[oldrev] = repo[newrev].rev()
278
292
279 except IOError as err:
293 if data['keepbranches'] is None:
280 if err.errno != errno.ENOENT:
281 raise
282 cmdutil.wrongtooltocontinue(repo, _('rebase'))
283
284 if keepbranches is None:
285 raise error.Abort(_('.hg/rebasestate is incomplete'))
294 raise error.Abort(_('.hg/rebasestate is incomplete'))
286
295
296 data['destmap'] = destmap
297 data['state'] = state
287 skipped = set()
298 skipped = set()
288 # recompute the set of skipped revs
299 # recompute the set of skipped revs
289 if not collapse:
300 if not data['collapse']:
290 seen = set(destmap.values())
301 seen = set(destmap.values())
291 for old, new in sorted(state.items()):
302 for old, new in sorted(state.items()):
292 if new != revtodo and new in seen:
303 if new != revtodo and new in seen:
293 skipped.add(old)
304 skipped.add(old)
294 seen.add(new)
305 seen.add(new)
306 data['skipped'] = skipped
295 repo.ui.debug('computed skipped revs: %s\n' %
307 repo.ui.debug('computed skipped revs: %s\n' %
296 (' '.join('%d' % r for r in sorted(skipped)) or ''))
308 (' '.join('%d' % r for r in sorted(skipped)) or ''))
297 repo.ui.debug('rebase status resumed\n')
298
309
299 self.originalwd = originalwd
310 return data
300 self.destmap = destmap
301 self.state = state
302 self.skipped = skipped
303 self.collapsef = collapse
304 self.keepf = keep
305 self.keepbranchesf = keepbranches
306 self.external = external
307 self.activebookmark = activebookmark
308
311
309 def _handleskippingobsolete(self, obsoleterevs, destmap):
312 def _handleskippingobsolete(self, obsoleterevs, destmap):
310 """Compute structures necessary for skipping obsolete revisions
313 """Compute structures necessary for skipping obsolete revisions
@@ -325,7 +328,7 b' class rebaseruntime(object):'
325 skippedset.update(obsoleteextinctsuccessors)
328 skippedset.update(obsoleteextinctsuccessors)
326 _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
329 _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
327
330
328 def _prepareabortorcontinue(self, isabort):
331 def _prepareabortorcontinue(self, isabort, backup=True, suppwarns=False):
329 try:
332 try:
330 self.restorestatus()
333 self.restorestatus()
331 self.collapsemsg = restorecollapsemsg(self.repo, isabort)
334 self.collapsemsg = restorecollapsemsg(self.repo, isabort)
@@ -341,8 +344,9 b' class rebaseruntime(object):'
341 hint = _('use "hg rebase --abort" to clear broken state')
344 hint = _('use "hg rebase --abort" to clear broken state')
342 raise error.Abort(msg, hint=hint)
345 raise error.Abort(msg, hint=hint)
343 if isabort:
346 if isabort:
344 return abort(self.repo, self.originalwd, self.destmap,
347 return abort(self.repo, self.originalwd, self.destmap, self.state,
345 self.state, activebookmark=self.activebookmark)
348 activebookmark=self.activebookmark, backup=backup,
349 suppwarns=suppwarns)
346
350
347 def _preparenewrebase(self, destmap):
351 def _preparenewrebase(self, destmap):
348 if not destmap:
352 if not destmap:
@@ -433,13 +437,10 b' class rebaseruntime(object):'
433 self.storestatus(tr)
437 self.storestatus(tr)
434
438
435 cands = [k for k, v in self.state.iteritems() if v == revtodo]
439 cands = [k for k, v in self.state.iteritems() if v == revtodo]
436 total = len(cands)
440 p = repo.ui.makeprogress(_("rebasing"), unit=_('changesets'),
437 posholder = [0]
441 total=len(cands))
438 def progress(ctx):
442 def progress(ctx):
439 posholder[0] += 1
443 p.increment(item=("%d:%s" % (ctx.rev(), ctx)))
440 self.repo.ui.progress(_("rebasing"), posholder[0],
441 ("%d:%s" % (ctx.rev(), ctx)),
442 _('changesets'), total)
443 allowdivergence = self.ui.configbool(
444 allowdivergence = self.ui.configbool(
444 'experimental', 'evolution.allowdivergence')
445 'experimental', 'evolution.allowdivergence')
445 for subset in sortsource(self.destmap):
446 for subset in sortsource(self.destmap):
@@ -452,7 +453,7 b' class rebaseruntime(object):'
452 )
453 )
453 for rev in sortedrevs:
454 for rev in sortedrevs:
454 self._rebasenode(tr, rev, allowdivergence, progress)
455 self._rebasenode(tr, rev, allowdivergence, progress)
455 ui.progress(_('rebasing'), None)
456 p.complete()
456 ui.note(_('rebase merging completed\n'))
457 ui.note(_('rebase merging completed\n'))
457
458
458 def _concludenode(self, rev, p1, p2, editor, commitmsg=None):
459 def _concludenode(self, rev, p1, p2, editor, commitmsg=None):
@@ -625,7 +626,7 b' class rebaseruntime(object):'
625 newwd = self.originalwd
626 newwd = self.originalwd
626 if newwd not in [c.rev() for c in repo[None].parents()]:
627 if newwd not in [c.rev() for c in repo[None].parents()]:
627 ui.note(_("update back to initial working directory parent\n"))
628 ui.note(_("update back to initial working directory parent\n"))
628 hg.updaterepo(repo, newwd, False)
629 hg.updaterepo(repo, newwd, overwrite=False)
629
630
630 collapsedas = None
631 collapsedas = None
631 if self.collapsef and not self.keepf:
632 if self.collapsef and not self.keepf:
@@ -673,8 +674,7 b' class rebaseruntime(object):'
673 ('a', 'abort', False, _('abort an interrupted rebase')),
674 ('a', 'abort', False, _('abort an interrupted rebase')),
674 ('', 'auto-orphans', '', _('automatically rebase orphan revisions '
675 ('', 'auto-orphans', '', _('automatically rebase orphan revisions '
675 'in the specified revset (EXPERIMENTAL)')),
676 'in the specified revset (EXPERIMENTAL)')),
676 ] +
677 ] + cmdutil.dryrunopts + cmdutil.formatteropts + cmdutil.confirmopts,
677 cmdutil.formatteropts,
678 _('[-s REV | -b REV] [-d REV] [OPTION]'))
678 _('[-s REV | -b REV] [-d REV] [OPTION]'))
679 def rebase(ui, repo, **opts):
679 def rebase(ui, repo, **opts):
680 """move changeset (and descendants) to a different branch
680 """move changeset (and descendants) to a different branch
@@ -797,7 +797,23 b' def rebase(ui, repo, **opts):'
797 unresolved conflicts.
797 unresolved conflicts.
798
798
799 """
799 """
800 opts = pycompat.byteskwargs(opts)
800 inmemory = ui.configbool('rebase', 'experimental.inmemory')
801 inmemory = ui.configbool('rebase', 'experimental.inmemory')
802 dryrun = opts.get('dry_run')
803 if dryrun:
804 if opts.get('abort'):
805 raise error.Abort(_('cannot specify both --dry-run and --abort'))
806 if opts.get('continue'):
807 raise error.Abort(_('cannot specify both --dry-run and --continue'))
808 if opts.get('confirm'):
809 dryrun = True
810 if opts.get('dry_run'):
811 raise error.Abort(_('cannot specify both --confirm and --dry-run'))
812 if opts.get('abort'):
813 raise error.Abort(_('cannot specify both --confirm and --abort'))
814 if opts.get('continue'):
815 raise error.Abort(_('cannot specify both --confirm and --continue'))
816
801 if (opts.get('continue') or opts.get('abort') or
817 if (opts.get('continue') or opts.get('abort') or
802 repo.currenttransaction() is not None):
818 repo.currenttransaction() is not None):
803 # in-memory rebase is not compatible with resuming rebases.
819 # in-memory rebase is not compatible with resuming rebases.
@@ -814,25 +830,67 b' def rebase(ui, repo, **opts):'
814 opts['rev'] = [revsetlang.formatspec('%ld and orphan()', userrevs)]
830 opts['rev'] = [revsetlang.formatspec('%ld and orphan()', userrevs)]
815 opts['dest'] = '_destautoorphanrebase(SRC)'
831 opts['dest'] = '_destautoorphanrebase(SRC)'
816
832
817 if inmemory:
833 if dryrun:
834 return _dryrunrebase(ui, repo, opts)
835 elif inmemory:
818 try:
836 try:
819 # in-memory merge doesn't support conflicts, so if we hit any, abort
837 # in-memory merge doesn't support conflicts, so if we hit any, abort
820 # and re-run as an on-disk merge.
838 # and re-run as an on-disk merge.
821 overrides = {('rebase', 'singletransaction'): True}
839 overrides = {('rebase', 'singletransaction'): True}
822 with ui.configoverride(overrides, 'rebase'):
840 with ui.configoverride(overrides, 'rebase'):
823 return _origrebase(ui, repo, inmemory=inmemory, **opts)
841 return _dorebase(ui, repo, opts, inmemory=inmemory)
824 except error.InMemoryMergeConflictsError:
842 except error.InMemoryMergeConflictsError:
825 ui.warn(_('hit merge conflicts; re-running rebase without in-memory'
843 ui.warn(_('hit merge conflicts; re-running rebase without in-memory'
826 ' merge\n'))
844 ' merge\n'))
827 _origrebase(ui, repo, **{'abort': True})
845 _dorebase(ui, repo, {'abort': True})
828 return _origrebase(ui, repo, inmemory=False, **opts)
846 return _dorebase(ui, repo, opts, inmemory=False)
829 else:
847 else:
830 return _origrebase(ui, repo, **opts)
848 return _dorebase(ui, repo, opts)
831
849
832 def _origrebase(ui, repo, inmemory=False, **opts):
850 def _dryrunrebase(ui, repo, opts):
833 opts = pycompat.byteskwargs(opts)
851 rbsrt = rebaseruntime(repo, ui, inmemory=True, opts=opts)
852 confirm = opts.get('confirm')
853 if confirm:
854 ui.status(_('starting in-memory rebase\n'))
855 else:
856 ui.status(_('starting dry-run rebase; repository will not be '
857 'changed\n'))
858 with repo.wlock(), repo.lock():
859 needsabort = True
860 try:
861 overrides = {('rebase', 'singletransaction'): True}
862 with ui.configoverride(overrides, 'rebase'):
863 _origrebase(ui, repo, opts, rbsrt, inmemory=True,
864 leaveunfinished=True)
865 except error.InMemoryMergeConflictsError:
866 ui.status(_('hit a merge conflict\n'))
867 return 1
868 else:
869 if confirm:
870 ui.status(_('rebase completed successfully\n'))
871 if not ui.promptchoice(_(b'apply changes (yn)?'
872 b'$$ &Yes $$ &No')):
873 # finish unfinished rebase
874 rbsrt._finishrebase()
875 else:
876 rbsrt._prepareabortorcontinue(isabort=True, backup=False,
877 suppwarns=True)
878 needsabort = False
879 else:
880 ui.status(_('dry-run rebase completed successfully; run without'
881 ' -n/--dry-run to perform this rebase\n'))
882 return 0
883 finally:
884 if needsabort:
885 # no need to store backup in case of dryrun
886 rbsrt._prepareabortorcontinue(isabort=True, backup=False,
887 suppwarns=True)
888
889 def _dorebase(ui, repo, opts, inmemory=False):
834 rbsrt = rebaseruntime(repo, ui, inmemory, opts)
890 rbsrt = rebaseruntime(repo, ui, inmemory, opts)
891 return _origrebase(ui, repo, opts, rbsrt, inmemory=inmemory)
835
892
893 def _origrebase(ui, repo, opts, rbsrt, inmemory=False, leaveunfinished=False):
836 with repo.wlock(), repo.lock():
894 with repo.wlock(), repo.lock():
837 # Validate input and define rebasing points
895 # Validate input and define rebasing points
838 destf = opts.get('dest', None)
896 destf = opts.get('dest', None)
@@ -902,7 +960,8 b' def _origrebase(ui, repo, inmemory=False'
902 dsguard = dirstateguard.dirstateguard(repo, 'rebase')
960 dsguard = dirstateguard.dirstateguard(repo, 'rebase')
903 with util.acceptintervention(dsguard):
961 with util.acceptintervention(dsguard):
904 rbsrt._performrebase(tr)
962 rbsrt._performrebase(tr)
905 rbsrt._finishrebase()
963 if not leaveunfinished:
964 rbsrt._finishrebase()
906
965
907 def _definedestmap(ui, repo, inmemory, destf=None, srcf=None, basef=None,
966 def _definedestmap(ui, repo, inmemory, destf=None, srcf=None, basef=None,
908 revf=None, destspace=None):
967 revf=None, destspace=None):
@@ -1255,13 +1314,7 b' def defineparents(repo, rev, destmap, st'
1255 # use unfiltered changelog since successorrevs may return filtered nodes
1314 # use unfiltered changelog since successorrevs may return filtered nodes
1256 assert repo.filtername is None
1315 assert repo.filtername is None
1257 cl = repo.changelog
1316 cl = repo.changelog
1258 def isancestor(a, b):
1317 isancestor = cl.isancestorrev
1259 # take revision numbers instead of nodes
1260 if a == b:
1261 return True
1262 elif a > b:
1263 return False
1264 return cl.isancestor(cl.node(a), cl.node(b))
1265
1318
1266 dest = destmap[rev]
1319 dest = destmap[rev]
1267 oldps = repo.changelog.parentrevs(rev) # old parents
1320 oldps = repo.changelog.parentrevs(rev) # old parents
@@ -1527,7 +1580,8 b' def needupdate(repo, state):'
1527
1580
1528 return False
1581 return False
1529
1582
1530 def abort(repo, originalwd, destmap, state, activebookmark=None):
1583 def abort(repo, originalwd, destmap, state, activebookmark=None, backup=True,
1584 suppwarns=False):
1531 '''Restore the repository to its original state. Additional args:
1585 '''Restore the repository to its original state. Additional args:
1532
1586
1533 activebookmark: the name of the bookmark that should be active after the
1587 activebookmark: the name of the bookmark that should be active after the
@@ -1572,8 +1626,7 b' def abort(repo, originalwd, destmap, sta'
1572
1626
1573 # Strip from the first rebased revision
1627 # Strip from the first rebased revision
1574 if rebased:
1628 if rebased:
1575 # no backup of rebased cset versions needed
1629 repair.strip(repo.ui, repo, strippoints, backup=backup)
1576 repair.strip(repo.ui, repo, strippoints)
1577
1630
1578 if activebookmark and activebookmark in repo._bookmarks:
1631 if activebookmark and activebookmark in repo._bookmarks:
1579 bookmarks.activate(repo, activebookmark)
1632 bookmarks.activate(repo, activebookmark)
@@ -1581,7 +1634,8 b' def abort(repo, originalwd, destmap, sta'
1581 finally:
1634 finally:
1582 clearstatus(repo)
1635 clearstatus(repo)
1583 clearcollapsemsg(repo)
1636 clearcollapsemsg(repo)
1584 repo.ui.warn(_('rebase aborted\n'))
1637 if not suppwarns:
1638 repo.ui.warn(_('rebase aborted\n'))
1585 return 0
1639 return 0
1586
1640
1587 def sortsource(destmap):
1641 def sortsource(destmap):
@@ -1790,33 +1844,31 b' def _computeobsoletenotrebased(repo, reb'
1790 assert repo.filtername is None
1844 assert repo.filtername is None
1791 cl = repo.changelog
1845 cl = repo.changelog
1792 nodemap = cl.nodemap
1846 nodemap = cl.nodemap
1793 extinctnodes = set(cl.node(r) for r in repo.revs('extinct()'))
1847 extinctrevs = set(repo.revs('extinct()'))
1794 for srcrev in rebaseobsrevs:
1848 for srcrev in rebaseobsrevs:
1795 srcnode = cl.node(srcrev)
1849 srcnode = cl.node(srcrev)
1796 destnode = cl.node(destmap[srcrev])
1797 # XXX: more advanced APIs are required to handle split correctly
1850 # XXX: more advanced APIs are required to handle split correctly
1798 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
1851 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
1799 # obsutil.allsuccessors includes node itself
1852 # obsutil.allsuccessors includes node itself
1800 successors.remove(srcnode)
1853 successors.remove(srcnode)
1801 if successors.issubset(extinctnodes):
1854 succrevs = {nodemap[s] for s in successors if s in nodemap}
1855 if succrevs.issubset(extinctrevs):
1802 # all successors are extinct
1856 # all successors are extinct
1803 obsoleteextinctsuccessors.add(srcrev)
1857 obsoleteextinctsuccessors.add(srcrev)
1804 if not successors:
1858 if not successors:
1805 # no successor
1859 # no successor
1806 obsoletenotrebased[srcrev] = None
1860 obsoletenotrebased[srcrev] = None
1807 else:
1861 else:
1808 for succnode in successors:
1862 dstrev = destmap[srcrev]
1809 if succnode not in nodemap:
1863 for succrev in succrevs:
1810 continue
1864 if cl.isancestorrev(succrev, dstrev):
1811 if cl.isancestor(succnode, destnode):
1865 obsoletenotrebased[srcrev] = succrev
1812 obsoletenotrebased[srcrev] = nodemap[succnode]
1813 break
1866 break
1814 else:
1867 else:
1815 # If 'srcrev' has a successor in rebase set but none in
1868 # If 'srcrev' has a successor in rebase set but none in
1816 # destination (which would be catched above), we shall skip it
1869 # destination (which would be catched above), we shall skip it
1817 # and its descendants to avoid divergence.
1870 # and its descendants to avoid divergence.
1818 if any(nodemap[s] in destmap for s in successors
1871 if any(s in destmap for s in succrevs):
1819 if s in nodemap):
1820 obsoletewithoutsuccessorindestination.add(srcrev)
1872 obsoletewithoutsuccessorindestination.add(srcrev)
1821
1873
1822 return (
1874 return (
@@ -70,17 +70,10 b' def relink(ui, repo, origin=None, **opts'
70 # No point in continuing
70 # No point in continuing
71 raise error.Abort(_('source and destination are on different devices'))
71 raise error.Abort(_('source and destination are on different devices'))
72
72
73 locallock = repo.lock()
73 with repo.lock(), src.lock():
74 try:
74 candidates = sorted(collect(src, ui))
75 remotelock = src.lock()
75 targets = prune(candidates, src.store.path, repo.store.path, ui)
76 try:
76 do_relink(src.store.path, repo.store.path, targets, ui)
77 candidates = sorted(collect(src, ui))
78 targets = prune(candidates, src.store.path, repo.store.path, ui)
79 do_relink(src.store.path, repo.store.path, targets, ui)
80 finally:
81 remotelock.release()
82 finally:
83 locallock.release()
84
77
85 def collect(src, ui):
78 def collect(src, ui):
86 seplen = len(os.path.sep)
79 seplen = len(os.path.sep)
@@ -94,6 +87,7 b' def collect(src, ui):'
94 # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
87 # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
95 total = live * 3 // 2
88 total = live * 3 // 2
96 src = src.store.path
89 src = src.store.path
90 progress = ui.makeprogress(_('collecting'), unit=_('files'), total=total)
97 pos = 0
91 pos = 0
98 ui.status(_("tip has %d files, estimated total number of files: %d\n")
92 ui.status(_("tip has %d files, estimated total number of files: %d\n")
99 % (live, total))
93 % (live, total))
@@ -108,9 +102,9 b' def collect(src, ui):'
108 continue
102 continue
109 pos += 1
103 pos += 1
110 candidates.append((os.path.join(relpath, filename), st))
104 candidates.append((os.path.join(relpath, filename), st))
111 ui.progress(_('collecting'), pos, filename, _('files'), total)
105 progress.update(pos, item=filename)
112
106
113 ui.progress(_('collecting'), None)
107 progress.complete()
114 ui.status(_('collected %d candidate storage files\n') % len(candidates))
108 ui.status(_('collected %d candidate storage files\n') % len(candidates))
115 return candidates
109 return candidates
116
110
@@ -132,7 +126,8 b' def prune(candidates, src, dst, ui):'
132 return st
126 return st
133
127
134 targets = []
128 targets = []
135 total = len(candidates)
129 progress = ui.makeprogress(_('pruning'), unit=_('files'),
130 total=len(candidates))
136 pos = 0
131 pos = 0
137 for fn, st in candidates:
132 for fn, st in candidates:
138 pos += 1
133 pos += 1
@@ -143,9 +138,9 b' def prune(candidates, src, dst, ui):'
143 ui.debug('not linkable: %s\n' % fn)
138 ui.debug('not linkable: %s\n' % fn)
144 continue
139 continue
145 targets.append((fn, ts.st_size))
140 targets.append((fn, ts.st_size))
146 ui.progress(_('pruning'), pos, fn, _('files'), total)
141 progress.update(pos, item=fn)
147
142
148 ui.progress(_('pruning'), None)
143 progress.complete()
149 ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
144 ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
150 return targets
145 return targets
151
146
@@ -164,8 +159,9 b' def do_relink(src, dst, files, ui):'
164 relinked = 0
159 relinked = 0
165 savedbytes = 0
160 savedbytes = 0
166
161
162 progress = ui.makeprogress(_('relinking'), unit=_('files'),
163 total=len(files))
167 pos = 0
164 pos = 0
168 total = len(files)
169 for f, sz in files:
165 for f, sz in files:
170 pos += 1
166 pos += 1
171 source = os.path.join(src, f)
167 source = os.path.join(src, f)
@@ -186,13 +182,13 b' def do_relink(src, dst, files, ui):'
186 continue
182 continue
187 try:
183 try:
188 relinkfile(source, tgt)
184 relinkfile(source, tgt)
189 ui.progress(_('relinking'), pos, f, _('files'), total)
185 progress.update(pos, item=f)
190 relinked += 1
186 relinked += 1
191 savedbytes += sz
187 savedbytes += sz
192 except OSError as inst:
188 except OSError as inst:
193 ui.warn('%s: %s\n' % (tgt, stringutil.forcebytestr(inst)))
189 ui.warn('%s: %s\n' % (tgt, stringutil.forcebytestr(inst)))
194
190
195 ui.progress(_('relinking'), None)
191 progress.complete()
196
192
197 ui.status(_('relinked %d files (%s reclaimed)\n') %
193 ui.status(_('relinked %d files (%s reclaimed)\n') %
198 (relinked, util.bytecount(savedbytes)))
194 (relinked, util.bytecount(savedbytes)))
@@ -249,6 +249,10 b' def extsetup(ui):'
249 extensions.wrapfunction(bookmarks, '_printbookmarks', wrapprintbookmarks)
249 extensions.wrapfunction(bookmarks, '_printbookmarks', wrapprintbookmarks)
250
250
251 def reposetup(ui, repo):
251 def reposetup(ui, repo):
252
253 # set the config option to store remotenames
254 repo.ui.setconfig('experimental', 'remotenames', True, 'remotenames-ext')
255
252 if not repo.local():
256 if not repo.local():
253 return
257 return
254
258
@@ -114,7 +114,7 b' schemes = {'
114
114
115 def extsetup(ui):
115 def extsetup(ui):
116 schemes.update(dict(ui.configitems('schemes')))
116 schemes.update(dict(ui.configitems('schemes')))
117 t = templater.engine(lambda x: x)
117 t = templater.engine(templater.parse)
118 for scheme, url in schemes.items():
118 for scheme, url in schemes.items():
119 if (pycompat.iswindows and len(scheme) == 1 and scheme.isalpha()
119 if (pycompat.iswindows and len(scheme) == 1 and scheme.isalpha()
120 and os.path.exists('%s:\\' % scheme)):
120 and os.path.exists('%s:\\' % scheme)):
@@ -594,10 +594,15 b' def listcmd(ui, repo, pats, opts):'
594 for chunk, label in patch.diffstatui(difflines, width=width):
594 for chunk, label in patch.diffstatui(difflines, width=width):
595 ui.write(chunk, label=label)
595 ui.write(chunk, label=label)
596
596
597 def patchcmds(ui, repo, pats, opts, subcommand):
597 def patchcmds(ui, repo, pats, opts):
598 """subcommand that displays shelves"""
598 """subcommand that displays shelves"""
599 if len(pats) == 0:
599 if len(pats) == 0:
600 raise error.Abort(_("--%s expects at least one shelf") % subcommand)
600 shelves = listshelves(repo)
601 if not shelves:
602 raise error.Abort(_("there are no shelves to show"))
603 mtime, name = shelves[0]
604 sname = util.split(name)[1]
605 pats = [sname]
601
606
602 for shelfname in pats:
607 for shelfname in pats:
603 if not shelvedfile(repo, shelfname, patchextension).exists():
608 if not shelvedfile(repo, shelfname, patchextension).exists():
@@ -621,14 +626,14 b' def unshelveabort(ui, repo, state, opts)'
621 try:
626 try:
622 checkparents(repo, state)
627 checkparents(repo, state)
623
628
624 repo.vfs.rename('unshelverebasestate', 'rebasestate')
629 merge.update(repo, state.pendingctx, False, True)
625 try:
630 if (state.activebookmark
626 rebase.rebase(ui, repo, **{
631 and state.activebookmark in repo._bookmarks):
627 r'abort' : True
632 bookmarks.activate(repo, state.activebookmark)
628 })
633
629 except Exception:
634 if repo.vfs.exists('unshelverebasestate'):
630 repo.vfs.rename('rebasestate', 'unshelverebasestate')
635 repo.vfs.rename('unshelverebasestate', 'rebasestate')
631 raise
636 rebase.clearstatus(repo)
632
637
633 mergefiles(ui, repo, state.wctx, state.pendingctx)
638 mergefiles(ui, repo, state.wctx, state.pendingctx)
634 repair.strip(ui, repo, state.nodestoremove, backup=False,
639 repair.strip(ui, repo, state.nodestoremove, backup=False,
@@ -683,22 +688,41 b' def unshelvecontinue(ui, repo, state, op'
683 _("unresolved conflicts, can't continue"),
688 _("unresolved conflicts, can't continue"),
684 hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
689 hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
685
690
686 repo.vfs.rename('unshelverebasestate', 'rebasestate')
691 shelvectx = repo[state.parents[1]]
687 try:
692 pendingctx = state.pendingctx
688 rebase.rebase(ui, repo, **{
693
689 r'continue' : True
694 with repo.dirstate.parentchange():
690 })
695 repo.setparents(state.pendingctx.node(), nodemod.nullid)
691 except Exception:
696 repo.dirstate.write(repo.currenttransaction())
692 repo.vfs.rename('rebasestate', 'unshelverebasestate')
697
693 raise
698 overrides = {('phases', 'new-commit'): phases.secret}
699 with repo.ui.configoverride(overrides, 'unshelve'):
700 with repo.dirstate.parentchange():
701 repo.setparents(state.parents[0], nodemod.nullid)
702 newnode = repo.commit(text=shelvectx.description(),
703 extra=shelvectx.extra(),
704 user=shelvectx.user(),
705 date=shelvectx.date())
694
706
695 shelvectx = repo['tip']
707 if newnode is None:
696 if state.pendingctx not in shelvectx.parents():
708 # If it ended up being a no-op commit, then the normal
697 # rebase was a no-op, so it produced no child commit
709 # merge state clean-up path doesn't happen, so do it
710 # here. Fix issue5494
711 merge.mergestate.clean(repo)
698 shelvectx = state.pendingctx
712 shelvectx = state.pendingctx
713 msg = _('note: unshelved changes already existed '
714 'in the working copy\n')
715 ui.status(msg)
699 else:
716 else:
700 # only strip the shelvectx if the rebase produced it
717 # only strip the shelvectx if we produced one
701 state.nodestoremove.append(shelvectx.node())
718 state.nodestoremove.append(newnode)
719 shelvectx = repo[newnode]
720
721 hg.updaterepo(repo, pendingctx.node(), overwrite=False)
722
723 if repo.vfs.exists('unshelverebasestate'):
724 repo.vfs.rename('unshelverebasestate', 'rebasestate')
725 rebase.clearstatus(repo)
702
726
703 mergefiles(ui, repo, state.wctx, shelvectx)
727 mergefiles(ui, repo, state.wctx, shelvectx)
704 restorebranch(ui, repo, state.branchtorestore)
728 restorebranch(ui, repo, state.branchtorestore)
@@ -746,33 +770,46 b' def _rebaserestoredcommit(ui, repo, opts'
746 if tmpwctx.node() == shelvectx.parents()[0].node():
770 if tmpwctx.node() == shelvectx.parents()[0].node():
747 return shelvectx
771 return shelvectx
748
772
749 ui.status(_('rebasing shelved changes\n'))
773 overrides = {
750 try:
774 ('ui', 'forcemerge'): opts.get('tool', ''),
751 rebase.rebase(ui, repo, **{
775 ('phases', 'new-commit'): phases.secret,
752 r'rev': [shelvectx.rev()],
776 }
753 r'dest': "%d" % tmpwctx.rev(),
777 with repo.ui.configoverride(overrides, 'unshelve'):
754 r'keep': True,
778 ui.status(_('rebasing shelved changes\n'))
755 r'tool': opts.get('tool', ''),
779 stats = merge.graft(repo, shelvectx, shelvectx.p1(),
756 })
780 labels=['shelve', 'working-copy'],
757 except error.InterventionRequired:
781 keepconflictparent=True)
758 tr.close()
782 if stats.unresolvedcount:
783 tr.close()
784
785 nodestoremove = [repo.changelog.node(rev)
786 for rev in xrange(oldtiprev, len(repo))]
787 shelvedstate.save(repo, basename, pctx, tmpwctx, nodestoremove,
788 branchtorestore, opts.get('keep'), activebookmark)
789 raise error.InterventionRequired(
790 _("unresolved conflicts (see 'hg resolve', then "
791 "'hg unshelve --continue')"))
759
792
760 nodestoremove = [repo.changelog.node(rev)
793 with repo.dirstate.parentchange():
761 for rev in xrange(oldtiprev, len(repo))]
794 repo.setparents(tmpwctx.node(), nodemod.nullid)
762 shelvedstate.save(repo, basename, pctx, tmpwctx, nodestoremove,
795 newnode = repo.commit(text=shelvectx.description(),
763 branchtorestore, opts.get('keep'), activebookmark)
796 extra=shelvectx.extra(),
797 user=shelvectx.user(),
798 date=shelvectx.date())
764
799
765 repo.vfs.rename('rebasestate', 'unshelverebasestate')
800 if newnode is None:
766 raise error.InterventionRequired(
801 # If it ended up being a no-op commit, then the normal
767 _("unresolved conflicts (see 'hg resolve', then "
802 # merge state clean-up path doesn't happen, so do it
768 "'hg unshelve --continue')"))
803 # here. Fix issue5494
804 merge.mergestate.clean(repo)
805 shelvectx = tmpwctx
806 msg = _('note: unshelved changes already existed '
807 'in the working copy\n')
808 ui.status(msg)
809 else:
810 shelvectx = repo[newnode]
811 hg.updaterepo(repo, tmpwctx.node(), False)
769
812
770 # refresh ctx after rebase completes
771 shelvectx = repo['tip']
772
773 if tmpwctx not in shelvectx.parents():
774 # rebase was a no-op, so it produced no child commit
775 shelvectx = tmpwctx
776 return shelvectx
813 return shelvectx
777
814
778 def _forgetunknownfiles(repo, shelvectx, addedbefore):
815 def _forgetunknownfiles(repo, shelvectx, addedbefore):
@@ -933,27 +970,27 b' def _dounshelve(ui, repo, *shelved, **op'
933 # to the original pctx.
970 # to the original pctx.
934
971
935 activebookmark = _backupactivebookmark(repo)
972 activebookmark = _backupactivebookmark(repo)
973 tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
974 tmpwctx)
975 repo, shelvectx = _unshelverestorecommit(ui, repo, basename)
976 _checkunshelveuntrackedproblems(ui, repo, shelvectx)
977 branchtorestore = ''
978 if shelvectx.branch() != shelvectx.p1().branch():
979 branchtorestore = shelvectx.branch()
980
981 shelvectx = _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev,
982 basename, pctx, tmpwctx,
983 shelvectx, branchtorestore,
984 activebookmark)
936 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
985 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
937 with ui.configoverride(overrides, 'unshelve'):
986 with ui.configoverride(overrides, 'unshelve'):
938 tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
987 mergefiles(ui, repo, pctx, shelvectx)
939 tmpwctx)
988 restorebranch(ui, repo, branchtorestore)
940 repo, shelvectx = _unshelverestorecommit(ui, repo, basename)
989 _forgetunknownfiles(repo, shelvectx, addedbefore)
941 _checkunshelveuntrackedproblems(ui, repo, shelvectx)
942 branchtorestore = ''
943 if shelvectx.branch() != shelvectx.p1().branch():
944 branchtorestore = shelvectx.branch()
945
990
946 shelvectx = _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev,
991 shelvedstate.clear(repo)
947 basename, pctx, tmpwctx,
992 _finishunshelve(repo, oldtiprev, tr, activebookmark)
948 shelvectx, branchtorestore,
993 unshelvecleanup(ui, repo, basename, opts)
949 activebookmark)
950 mergefiles(ui, repo, pctx, shelvectx)
951 restorebranch(ui, repo, branchtorestore)
952 _forgetunknownfiles(repo, shelvectx, addedbefore)
953
954 shelvedstate.clear(repo)
955 _finishunshelve(repo, oldtiprev, tr, activebookmark)
956 unshelvecleanup(ui, repo, basename, opts)
957 finally:
994 finally:
958 if tr:
995 if tr:
959 tr.release()
996 tr.release()
@@ -979,11 +1016,14 b' def _dounshelve(ui, repo, *shelved, **op'
979 ('n', 'name', '',
1016 ('n', 'name', '',
980 _('use the given name for the shelved commit'), _('NAME')),
1017 _('use the given name for the shelved commit'), _('NAME')),
981 ('p', 'patch', None,
1018 ('p', 'patch', None,
982 _('show patch')),
1019 _('output patches for changes (provide the names of the shelved '
1020 'changes as positional arguments)')),
983 ('i', 'interactive', None,
1021 ('i', 'interactive', None,
984 _('interactive mode, only works while creating a shelve')),
1022 _('interactive mode, only works while creating a shelve')),
985 ('', 'stat', None,
1023 ('', 'stat', None,
986 _('output diffstat-style summary of changes'))] + cmdutil.walkopts,
1024 _('output diffstat-style summary of changes (provide the names of '
1025 'the shelved changes as positional arguments)')
1026 )] + cmdutil.walkopts,
987 _('hg shelve [OPTION]... [FILE]...'))
1027 _('hg shelve [OPTION]... [FILE]...'))
988 def shelvecmd(ui, repo, *pats, **opts):
1028 def shelvecmd(ui, repo, *pats, **opts):
989 '''save and set aside changes from the working directory
1029 '''save and set aside changes from the working directory
@@ -1047,10 +1087,8 b' def shelvecmd(ui, repo, *pats, **opts):'
1047 return deletecmd(ui, repo, pats)
1087 return deletecmd(ui, repo, pats)
1048 elif checkopt('list'):
1088 elif checkopt('list'):
1049 return listcmd(ui, repo, pats, opts)
1089 return listcmd(ui, repo, pats, opts)
1050 elif checkopt('patch'):
1090 elif checkopt('patch') or checkopt('stat'):
1051 return patchcmds(ui, repo, pats, opts, subcommand='patch')
1091 return patchcmds(ui, repo, pats, opts)
1052 elif checkopt('stat'):
1053 return patchcmds(ui, repo, pats, opts, subcommand='stat')
1054 else:
1092 else:
1055 return createcmd(ui, repo, pats, opts)
1093 return createcmd(ui, repo, pats, opts)
1056
1094
@@ -138,9 +138,9 b' def _setuplog(ui):'
138 extensions.wrapfunction(logcmdutil, '_initialrevs', _initialrevs)
138 extensions.wrapfunction(logcmdutil, '_initialrevs', _initialrevs)
139
139
140 def _clonesparsecmd(orig, ui, repo, *args, **opts):
140 def _clonesparsecmd(orig, ui, repo, *args, **opts):
141 include_pat = opts.get('include')
141 include_pat = opts.get(r'include')
142 exclude_pat = opts.get('exclude')
142 exclude_pat = opts.get(r'exclude')
143 enableprofile_pat = opts.get('enable_profile')
143 enableprofile_pat = opts.get(r'enable_profile')
144 include = exclude = enableprofile = False
144 include = exclude = enableprofile = False
145 if include_pat:
145 if include_pat:
146 pat = include_pat
146 pat = include_pat
@@ -178,7 +178,7 b' def _setupadd(ui):'
178 'also include directories of added files in sparse config'))
178 'also include directories of added files in sparse config'))
179
179
180 def _add(orig, ui, repo, *pats, **opts):
180 def _add(orig, ui, repo, *pats, **opts):
181 if opts.get('sparse'):
181 if opts.get(r'sparse'):
182 dirs = set()
182 dirs = set()
183 for pat in pats:
183 for pat in pats:
184 dirname, basename = util.split(pat)
184 dirname, basename = util.split(pat)
@@ -60,6 +60,7 b' def split(ui, repo, *revs, **opts):'
60 By default, rebase connected non-obsoleted descendants onto the new
60 By default, rebase connected non-obsoleted descendants onto the new
61 changeset. Use --no-rebase to avoid the rebase.
61 changeset. Use --no-rebase to avoid the rebase.
62 """
62 """
63 opts = pycompat.byteskwargs(opts)
63 revlist = []
64 revlist = []
64 if opts.get('rev'):
65 if opts.get('rev'):
65 revlist.append(opts.get('rev'))
66 revlist.append(opts.get('rev'))
@@ -169,7 +170,7 b' def dosplit(ui, repo, tr, ctx, opts):'
169 raise error.Abort(_('cannot split an empty revision'))
170 raise error.Abort(_('cannot split an empty revision'))
170
171
171 scmutil.cleanupnodes(repo, {ctx.node(): [c.node() for c in committed]},
172 scmutil.cleanupnodes(repo, {ctx.node(): [c.node() for c in committed]},
172 operation='split')
173 operation='split', fixphase=True)
173
174
174 return committed[-1]
175 return committed[-1]
175
176
@@ -103,8 +103,9 b' def strip(ui, repo, revs, update=True, b'
103 'option)'), _('REV')),
103 'option)'), _('REV')),
104 ('f', 'force', None, _('force removal of changesets, discard '
104 ('f', 'force', None, _('force removal of changesets, discard '
105 'uncommitted changes (no backup)')),
105 'uncommitted changes (no backup)')),
106 ('', 'no-backup', None, _('no backups')),
106 ('', 'no-backup', None, _('do not save backup bundle')),
107 ('', 'nobackup', None, _('no backups (DEPRECATED)')),
107 ('', 'nobackup', None, _('do not save backup bundle '
108 '(DEPRECATED)')),
108 ('n', '', None, _('ignored (DEPRECATED)')),
109 ('n', '', None, _('ignored (DEPRECATED)')),
109 ('k', 'keep', None, _("do not modify working directory during "
110 ('k', 'keep', None, _("do not modify working directory during "
110 "strip")),
111 "strip")),
@@ -165,7 +166,7 b' def stripcmd(ui, repo, *revs, **opts):'
165 nodetobookmarks.setdefault(node, []).append(mark)
166 nodetobookmarks.setdefault(node, []).append(mark)
166 for marks in nodetobookmarks.values():
167 for marks in nodetobookmarks.values():
167 if bookmarks.issuperset(marks):
168 if bookmarks.issuperset(marks):
168 rsrevs = repair.stripbmrevset(repo, marks[0])
169 rsrevs = scmutil.bookmarkrevs(repo, marks[0])
169 revs.update(set(rsrevs))
170 revs.update(set(rsrevs))
170 if not revs:
171 if not revs:
171 with repo.lock(), repo.transaction('bookmark') as tr:
172 with repo.lock(), repo.transaction('bookmark') as tr:
@@ -16,7 +16,7 b' map from a changeset hash to its hash in'
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import os
18 import os
19 import tempfile
19
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial import (
21 from mercurial import (
22 bundlerepo,
22 bundlerepo,
@@ -215,7 +215,7 b' class transplanter(object):'
215 if skipmerge:
215 if skipmerge:
216 patchfile = None
216 patchfile = None
217 else:
217 else:
218 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
218 fd, patchfile = pycompat.mkstemp(prefix='hg-transplant-')
219 fp = os.fdopen(fd, r'wb')
219 fp = os.fdopen(fd, r'wb')
220 gen = patch.diff(source, parent, node, opts=diffopts)
220 gen = patch.diff(source, parent, node, opts=diffopts)
221 for chunk in gen:
221 for chunk in gen:
@@ -263,7 +263,7 b' class transplanter(object):'
263
263
264 self.ui.status(_('filtering %s\n') % patchfile)
264 self.ui.status(_('filtering %s\n') % patchfile)
265 user, date, msg = (changelog[1], changelog[2], changelog[4])
265 user, date, msg = (changelog[1], changelog[2], changelog[4])
266 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
266 fd, headerfile = pycompat.mkstemp(prefix='hg-transplant-')
267 fp = os.fdopen(fd, r'wb')
267 fp = os.fdopen(fd, r'wb')
268 fp.write("# HG changeset patch\n")
268 fp.write("# HG changeset patch\n")
269 fp.write("# User %s\n" % user)
269 fp.write("# User %s\n" % user)
@@ -523,7 +523,8 b' def browserevs(ui, repo, nodes, opts):'
523 displayer.show(repo[node])
523 displayer.show(repo[node])
524 action = None
524 action = None
525 while not action:
525 while not action:
526 action = 'ynmpcq?'[ui.promptchoice(prompt)]
526 choice = ui.promptchoice(prompt)
527 action = 'ynmpcq?'[choice:choice + 1]
527 if action == '?':
528 if action == '?':
528 for c, t in ui.extractchoices(prompt)[1]:
529 for c, t in ui.extractchoices(prompt)[1]:
529 ui.write('%s: %s\n' % (c, t))
530 ui.write('%s: %s\n' % (c, t))
@@ -682,7 +683,7 b' def _dotransplant(ui, repo, *revs, **opt'
682 sourcerepo = opts.get('source')
683 sourcerepo = opts.get('source')
683 if sourcerepo:
684 if sourcerepo:
684 peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
685 peer = hg.peer(repo, opts, ui.expandpath(sourcerepo))
685 heads = map(peer.lookup, opts.get('branch', ()))
686 heads = pycompat.maplist(peer.lookup, opts.get('branch', ()))
686 target = set(heads)
687 target = set(heads)
687 for r in revs:
688 for r in revs:
688 try:
689 try:
@@ -693,7 +694,7 b' def _dotransplant(ui, repo, *revs, **opt'
693 onlyheads=sorted(target), force=True)
694 onlyheads=sorted(target), force=True)
694 else:
695 else:
695 source = repo
696 source = repo
696 heads = map(source.lookup, opts.get('branch', ()))
697 heads = pycompat.maplist(source.lookup, opts.get('branch', ()))
697 cleanupfn = None
698 cleanupfn = None
698
699
699 try:
700 try:
@@ -708,7 +709,7 b' def _dotransplant(ui, repo, *revs, **opt'
708 matchfn = lambda x: tf(x) and x not in prune
709 matchfn = lambda x: tf(x) and x not in prune
709 else:
710 else:
710 matchfn = tf
711 matchfn = tf
711 merges = map(source.lookup, opts.get('merge', ()))
712 merges = pycompat.maplist(source.lookup, opts.get('merge', ()))
712 revmap = {}
713 revmap = {}
713 if revs:
714 if revs:
714 for r in scmutil.revrange(source, revs):
715 for r in scmutil.revrange(source, revs):
@@ -91,12 +91,7 b' def _commitfiltered(repo, ctx, match, ke'
91 user=ctx.user(),
91 user=ctx.user(),
92 date=ctx.date(),
92 date=ctx.date(),
93 extra=ctx.extra())
93 extra=ctx.extra())
94 # phase handling
94 return repo.commitctx(new)
95 commitphase = ctx.phase()
96 overrides = {('phases', 'new-commit'): commitphase}
97 with repo.ui.configoverride(overrides, 'uncommit'):
98 newid = repo.commitctx(new)
99 return newid
100
95
101 def _fixdirstate(repo, oldctx, newctx, status):
96 def _fixdirstate(repo, oldctx, newctx, status):
102 """ fix the dirstate after switching the working directory from oldctx to
97 """ fix the dirstate after switching the working directory from oldctx to
@@ -183,7 +178,7 b' def uncommit(ui, repo, *pats, **opts):'
183 # Fully removed the old commit
178 # Fully removed the old commit
184 mapping[old.node()] = ()
179 mapping[old.node()] = ()
185
180
186 scmutil.cleanupnodes(repo, mapping, 'uncommit')
181 scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True)
187
182
188 with repo.dirstate.parentchange():
183 with repo.dirstate.parentchange():
189 repo.dirstate.setparents(newid, node.nullid)
184 repo.dirstate.setparents(newid, node.nullid)
@@ -242,12 +237,7 b' def unamend(ui, repo, **opts):'
242 user=predctx.user(),
237 user=predctx.user(),
243 date=predctx.date(),
238 date=predctx.date(),
244 extra=extras)
239 extra=extras)
245 # phase handling
240 newprednode = repo.commitctx(newctx)
246 commitphase = curctx.phase()
247 overrides = {('phases', 'new-commit'): commitphase}
248 with repo.ui.configoverride(overrides, 'uncommit'):
249 newprednode = repo.commitctx(newctx)
250
251 newpredctx = repo[newprednode]
241 newpredctx = repo[newprednode]
252 dirstate = repo.dirstate
242 dirstate = repo.dirstate
253
243
@@ -257,4 +247,4 b' def unamend(ui, repo, **opts):'
257 _fixdirstate(repo, curctx, newpredctx, s)
247 _fixdirstate(repo, curctx, newpredctx, s)
258
248
259 mapping = {curctx.node(): (newprednode,)}
249 mapping = {curctx.node(): (newprednode,)}
260 scmutil.cleanupnodes(repo, mapping, 'unamend')
250 scmutil.cleanupnodes(repo, mapping, 'unamend', fixphase=True)
@@ -90,7 +90,7 b' def decode(arg):'
90 return arg
90 return arg
91
91
92 def encode(arg):
92 def encode(arg):
93 if isinstance(arg, unicode):
93 if isinstance(arg, pycompat.unicode):
94 return arg.encode(_encoding)
94 return arg.encode(_encoding)
95 elif isinstance(arg, tuple):
95 elif isinstance(arg, tuple):
96 return tuple(map(encode, arg))
96 return tuple(map(encode, arg))
@@ -127,7 +127,7 b' def basewrapper(func, argtype, enc, dec,'
127 " %s encoding\n") % (_encoding))
127 " %s encoding\n") % (_encoding))
128
128
129 def wrapper(func, args, kwds):
129 def wrapper(func, args, kwds):
130 return basewrapper(func, unicode, encode, decode, args, kwds)
130 return basewrapper(func, pycompat.unicode, encode, decode, args, kwds)
131
131
132
132
133 def reversewrapper(func, args, kwds):
133 def reversewrapper(func, args, kwds):
@@ -13696,7 +13696,7 b' msgid ""'
13696 msgstr ""
13696 msgstr ""
13697
13697
13698 msgid ""
13698 msgid ""
13699 "``allow_archive``\n"
13699 "``allow-archive``\n"
13700 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
13700 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
13701 " Default is empty."
13701 " Default is empty."
13702 msgstr ""
13702 msgstr ""
@@ -17347,7 +17347,7 b' msgid ""'
17347 msgstr ""
17347 msgstr ""
17348
17348
17349 msgid ""
17349 msgid ""
17350 "``allow_archive``\n"
17350 "``allow-archive``\n"
17351 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
17351 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
17352 " Default is empty."
17352 " Default is empty."
17353 msgstr ""
17353 msgstr ""
@@ -27712,11 +27712,11 b' msgstr ""'
27712 " サーバの待ちうけアドレス。 (デフォルト値: ホストの持つ全アドレス)"
27712 " サーバの待ちうけアドレス。 (デフォルト値: ホストの持つ全アドレス)"
27713
27713
27714 msgid ""
27714 msgid ""
27715 "``allow_archive``\n"
27715 "``allow-archive``\n"
27716 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
27716 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
27717 " (default: empty)"
27717 " (default: empty)"
27718 msgstr ""
27718 msgstr ""
27719 "``allow_archive``\n"
27719 "``allow-archive``\n"
27720 " 利用可能なダウンロード向けのアーカイブ形式 (bz2, gz, zip) 一覧。\n"
27720 " 利用可能なダウンロード向けのアーカイブ形式 (bz2, gz, zip) 一覧。\n"
27721 " (デフォルト値: 空 = ダウンロード不可)"
27721 " (デフォルト値: 空 = ダウンロード不可)"
27722
27722
@@ -28663,11 +28663,11 b' msgstr ""'
28663 " (padrão: usa todos os endereços)"
28663 " (padrão: usa todos os endereços)"
28664
28664
28665 msgid ""
28665 msgid ""
28666 "``allow_archive``\n"
28666 "``allow-archive``\n"
28667 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
28667 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
28668 " (default: empty)"
28668 " (default: empty)"
28669 msgstr ""
28669 msgstr ""
28670 "``allow_archive``\n"
28670 "``allow-archive``\n"
28671 " Lista de formatos de pacote (bz2, gz, zip) permitidos para download.\n"
28671 " Lista de formatos de pacote (bz2, gz, zip) permitidos para download.\n"
28672 " (padrão: lista vazia)"
28672 " (padrão: lista vazia)"
28673
28673
@@ -12099,7 +12099,7 b' msgid ""'
12099 msgstr ""
12099 msgstr ""
12100
12100
12101 msgid ""
12101 msgid ""
12102 "``allow_archive``\n"
12102 "``allow-archive``\n"
12103 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
12103 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
12104 " Default is empty."
12104 " Default is empty."
12105 msgstr ""
12105 msgstr ""
@@ -19776,11 +19776,11 b' msgstr ""'
19776 " Адрес прослушиваемого интерфейса. По умолчанию все интерфейсы."
19776 " Адрес прослушиваемого интерфейса. По умолчанию все интерфейсы."
19777
19777
19778 msgid ""
19778 msgid ""
19779 "``allow_archive``\n"
19779 "``allow-archive``\n"
19780 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
19780 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
19781 " Default is empty."
19781 " Default is empty."
19782 msgstr ""
19782 msgstr ""
19783 "``allow_archive``\n"
19783 "``allow-archive``\n"
19784 " Список форматов архивов (bz2, gz, zip), которые можно скачивать.\n"
19784 " Список форматов архивов (bz2, gz, zip), которые можно скачивать.\n"
19785 " По умолчанию пуст."
19785 " По умолчанию пуст."
19786
19786
@@ -15034,7 +15034,7 b' msgid ""'
15034 msgstr ""
15034 msgstr ""
15035
15035
15036 msgid ""
15036 msgid ""
15037 "``allow_archive``\n"
15037 "``allow-archive``\n"
15038 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
15038 " List of archive format (bz2, gz, zip) allowed for downloading.\n"
15039 " Default is empty."
15039 " Default is empty."
15040 msgstr ""
15040 msgstr ""
@@ -339,6 +339,10 b' class lazyancestors(object):'
339 seen = self._containsseen
339 seen = self._containsseen
340 if target in seen:
340 if target in seen:
341 return True
341 return True
342 # Only integer target is valid, but some callers expect 'None in self'
343 # to be False. So we explicitly allow it.
344 if target is None:
345 return False
342
346
343 parentrevs = self._parentrevs
347 parentrevs = self._parentrevs
344 visit = self._containsvisit
348 visit = self._containsvisit
@@ -322,13 +322,14 b' def archive(repo, dest, node, kind, deco'
322 files.sort()
322 files.sort()
323 scmutil.prefetchfiles(repo, [ctx.rev()],
323 scmutil.prefetchfiles(repo, [ctx.rev()],
324 scmutil.matchfiles(repo, files))
324 scmutil.matchfiles(repo, files))
325 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
325 progress = scmutil.progress(repo.ui, _('archiving'), unit=_('files'),
326 for i, f in enumerate(files):
326 total=total)
327 progress.update(0)
328 for f in files:
327 ff = ctx.flags(f)
329 ff = ctx.flags(f)
328 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
330 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
329 repo.ui.progress(_('archiving'), i + 1, item=f,
331 progress.increment(item=f)
330 unit=_('files'), total=total)
332 progress.complete()
331 repo.ui.progress(_('archiving'), None)
332
333
333 if subrepos:
334 if subrepos:
334 for subpath in sorted(ctx.substate):
335 for subpath in sorted(ctx.substate):
@@ -310,6 +310,7 b' int bdiff_diff(struct bdiff_line *a, int'
310 return count;
310 return count;
311 }
311 }
312
312
313 /* deallocate list of hunks; l may be NULL */
313 void bdiff_freehunks(struct bdiff_hunk *l)
314 void bdiff_freehunks(struct bdiff_hunk *l)
314 {
315 {
315 struct bdiff_hunk *n;
316 struct bdiff_hunk *n;
@@ -9,7 +9,8 b' static inline uint32_t getbe32(const cha'
9 {
9 {
10 const unsigned char *d = (const unsigned char *)c;
10 const unsigned char *d = (const unsigned char *)c;
11
11
12 return ((d[0] << 24) | (d[1] << 16) | (d[2] << 8) | (d[3]));
12 return ((((uint32_t)d[0]) << 24) | (((uint32_t)d[1]) << 16) |
13 (((uint32_t)d[2]) << 8) | (d[3]));
13 }
14 }
14
15
15 static inline int16_t getbeint16(const char *c)
16 static inline int16_t getbeint16(const char *c)
@@ -43,7 +43,7 b' def _getbkfile(repo):'
43 fp, pending = txnutil.trypending(repo.root, repo.vfs, 'bookmarks')
43 fp, pending = txnutil.trypending(repo.root, repo.vfs, 'bookmarks')
44 return fp
44 return fp
45
45
46 class bmstore(dict):
46 class bmstore(object):
47 """Storage for bookmarks.
47 """Storage for bookmarks.
48
48
49 This object should do all bookmark-related reads and writes, so
49 This object should do all bookmark-related reads and writes, so
@@ -58,13 +58,13 b' class bmstore(dict):'
58 """
58 """
59
59
60 def __init__(self, repo):
60 def __init__(self, repo):
61 dict.__init__(self)
62 self._repo = repo
61 self._repo = repo
62 self._refmap = refmap = {} # refspec: node
63 self._nodemap = nodemap = {} # node: sorted([refspec, ...])
63 self._clean = True
64 self._clean = True
64 self._aclean = True
65 self._aclean = True
65 nm = repo.changelog.nodemap
66 nm = repo.changelog.nodemap
66 tonode = bin # force local lookup
67 tonode = bin # force local lookup
67 setitem = dict.__setitem__
68 try:
68 try:
69 with _getbkfile(repo) as bkfile:
69 with _getbkfile(repo) as bkfile:
70 for line in bkfile:
70 for line in bkfile:
@@ -76,7 +76,15 b' class bmstore(dict):'
76 node = tonode(sha)
76 node = tonode(sha)
77 if node in nm:
77 if node in nm:
78 refspec = encoding.tolocal(refspec)
78 refspec = encoding.tolocal(refspec)
79 setitem(self, refspec, node)
79 refmap[refspec] = node
80 nrefs = nodemap.get(node)
81 if nrefs is None:
82 nodemap[node] = [refspec]
83 else:
84 nrefs.append(refspec)
85 if nrefs[-2] > refspec:
86 # bookmarks weren't sorted before 4.5
87 nrefs.sort()
80 except (TypeError, ValueError):
88 except (TypeError, ValueError):
81 # TypeError:
89 # TypeError:
82 # - bin(...)
90 # - bin(...)
@@ -96,38 +104,78 b' class bmstore(dict):'
96
104
97 @active.setter
105 @active.setter
98 def active(self, mark):
106 def active(self, mark):
99 if mark is not None and mark not in self:
107 if mark is not None and mark not in self._refmap:
100 raise AssertionError('bookmark %s does not exist!' % mark)
108 raise AssertionError('bookmark %s does not exist!' % mark)
101
109
102 self._active = mark
110 self._active = mark
103 self._aclean = False
111 self._aclean = False
104
112
105 def __setitem__(self, *args, **kwargs):
113 def __len__(self):
106 raise error.ProgrammingError("use 'bookmarks.applychanges' instead")
114 return len(self._refmap)
115
116 def __iter__(self):
117 return iter(self._refmap)
118
119 def iteritems(self):
120 return self._refmap.iteritems()
121
122 def items(self):
123 return self._refmap.items()
107
124
108 def _set(self, key, value):
125 # TODO: maybe rename to allnames()?
109 self._clean = False
126 def keys(self):
110 return dict.__setitem__(self, key, value)
127 return self._refmap.keys()
128
129 # TODO: maybe rename to allnodes()? but nodes would have to be deduplicated
130 # could be self._nodemap.keys()
131 def values(self):
132 return self._refmap.values()
133
134 def __contains__(self, mark):
135 return mark in self._refmap
136
137 def __getitem__(self, mark):
138 return self._refmap[mark]
111
139
112 def __delitem__(self, key):
140 def get(self, mark, default=None):
113 raise error.ProgrammingError("use 'bookmarks.applychanges' instead")
141 return self._refmap.get(mark, default)
114
142
115 def _del(self, key):
143 def _set(self, mark, node):
116 self._clean = False
144 self._clean = False
117 return dict.__delitem__(self, key)
145 if mark in self._refmap:
146 self._del(mark)
147 self._refmap[mark] = node
148 nrefs = self._nodemap.get(node)
149 if nrefs is None:
150 self._nodemap[node] = [mark]
151 else:
152 nrefs.append(mark)
153 nrefs.sort()
118
154
119 def update(self, *others):
155 def _del(self, mark):
120 raise error.ProgrammingError("use 'bookmarks.applychanges' instead")
156 self._clean = False
157 node = self._refmap.pop(mark)
158 nrefs = self._nodemap[node]
159 if len(nrefs) == 1:
160 assert nrefs[0] == mark
161 del self._nodemap[node]
162 else:
163 nrefs.remove(mark)
164
165 def names(self, node):
166 """Return a sorted list of bookmarks pointing to the specified node"""
167 return self._nodemap.get(node, [])
121
168
122 def changectx(self, mark):
169 def changectx(self, mark):
123 return self._repo[self[mark]]
170 node = self._refmap[mark]
171 return self._repo[node]
124
172
125 def applychanges(self, repo, tr, changes):
173 def applychanges(self, repo, tr, changes):
126 """Apply a list of changes to bookmarks
174 """Apply a list of changes to bookmarks
127 """
175 """
128 bmchanges = tr.changes.get('bookmarks')
176 bmchanges = tr.changes.get('bookmarks')
129 for name, node in changes:
177 for name, node in changes:
130 old = self.get(name)
178 old = self._refmap.get(name)
131 if node is None:
179 if node is None:
132 self._del(name)
180 self._del(name)
133 else:
181 else:
@@ -151,7 +199,7 b' class bmstore(dict):'
151 def _writerepo(self, repo):
199 def _writerepo(self, repo):
152 """Factored out for extensibility"""
200 """Factored out for extensibility"""
153 rbm = repo._bookmarks
201 rbm = repo._bookmarks
154 if rbm.active not in self:
202 if rbm.active not in self._refmap:
155 rbm.active = None
203 rbm.active = None
156 rbm._writeactive()
204 rbm._writeactive()
157
205
@@ -182,7 +230,7 b' class bmstore(dict):'
182 self._aclean = True
230 self._aclean = True
183
231
184 def _write(self, fp):
232 def _write(self, fp):
185 for name, node in sorted(self.iteritems()):
233 for name, node in sorted(self._refmap.iteritems()):
186 fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
234 fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
187 self._clean = True
235 self._clean = True
188 self._repo.invalidatevolatilesets()
236 self._repo.invalidatevolatilesets()
@@ -208,15 +256,15 b' class bmstore(dict):'
208 If divergent bookmark are to be deleted, they will be returned as list.
256 If divergent bookmark are to be deleted, they will be returned as list.
209 """
257 """
210 cur = self._repo['.'].node()
258 cur = self._repo['.'].node()
211 if mark in self and not force:
259 if mark in self._refmap and not force:
212 if target:
260 if target:
213 if self[mark] == target and target == cur:
261 if self._refmap[mark] == target and target == cur:
214 # re-activating a bookmark
262 # re-activating a bookmark
215 return []
263 return []
216 rev = self._repo[target].rev()
264 rev = self._repo[target].rev()
217 anc = self._repo.changelog.ancestors([rev])
265 anc = self._repo.changelog.ancestors([rev])
218 bmctx = self.changectx(mark)
266 bmctx = self.changectx(mark)
219 divs = [self[b] for b in self
267 divs = [self._refmap[b] for b in self._refmap
220 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
268 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
221
269
222 # allow resolving a single divergent bookmark even if moving
270 # allow resolving a single divergent bookmark even if moving
@@ -765,7 +813,7 b' def validdest(repo, old, new):'
765 return new.node() in obsutil.foreground(repo, [old.node()])
813 return new.node() in obsutil.foreground(repo, [old.node()])
766 else:
814 else:
767 # still an independent clause as it is lazier (and therefore faster)
815 # still an independent clause as it is lazier (and therefore faster)
768 return old.descendant(new)
816 return old.isancestorof(new)
769
817
770 def checkformat(repo, mark):
818 def checkformat(repo, mark):
771 """return a valid version of a potential bookmark name
819 """return a valid version of a potential bookmark name
@@ -875,11 +923,14 b' def _printbookmarks(ui, repo, bmarks, **'
875 """
923 """
876 opts = pycompat.byteskwargs(opts)
924 opts = pycompat.byteskwargs(opts)
877 fm = ui.formatter('bookmarks', opts)
925 fm = ui.formatter('bookmarks', opts)
926 contexthint = fm.contexthint('bookmark rev node active')
878 hexfn = fm.hexfunc
927 hexfn = fm.hexfunc
879 if len(bmarks) == 0 and fm.isplain():
928 if len(bmarks) == 0 and fm.isplain():
880 ui.status(_("no bookmarks set\n"))
929 ui.status(_("no bookmarks set\n"))
881 for bmark, (n, prefix, label) in sorted(bmarks.iteritems()):
930 for bmark, (n, prefix, label) in sorted(bmarks.iteritems()):
882 fm.startitem()
931 fm.startitem()
932 if 'ctx' in contexthint:
933 fm.context(ctx=repo[n])
883 if not ui.quiet:
934 if not ui.quiet:
884 fm.plain(' %s ' % prefix, label=label)
935 fm.plain(' %s ' % prefix, label=label)
885 fm.write('bookmark', '%s', bmark, label=label)
936 fm.write('bookmark', '%s', bmark, label=label)
@@ -628,9 +628,10 b' class bundle20(object):'
628 def addparam(self, name, value=None):
628 def addparam(self, name, value=None):
629 """add a stream level parameter"""
629 """add a stream level parameter"""
630 if not name:
630 if not name:
631 raise ValueError(r'empty parameter name')
631 raise error.ProgrammingError(b'empty parameter name')
632 if name[0:1] not in pycompat.bytestr(string.ascii_letters):
632 if name[0:1] not in pycompat.bytestr(string.ascii_letters):
633 raise ValueError(r'non letter first character: %s' % name)
633 raise error.ProgrammingError(b'non letter first character: %s'
634 % name)
634 self._params.append((name, value))
635 self._params.append((name, value))
635
636
636 def addpart(self, part):
637 def addpart(self, part):
@@ -1877,7 +1878,7 b' def handleremotechangegroup(op, inpart):'
1877 real_part.validate()
1878 real_part.validate()
1878 except error.Abort as e:
1879 except error.Abort as e:
1879 raise error.Abort(_('bundle at %s is corrupted:\n%s') %
1880 raise error.Abort(_('bundle at %s is corrupted:\n%s') %
1880 (util.hidepassword(raw_url), str(e)))
1881 (util.hidepassword(raw_url), bytes(e)))
1881 assert not inpart.read()
1882 assert not inpart.read()
1882
1883
1883 @parthandler('reply:changegroup', ('return', 'in-reply-to'))
1884 @parthandler('reply:changegroup', ('return', 'in-reply-to'))
@@ -15,7 +15,6 b' from __future__ import absolute_import'
15
15
16 import os
16 import os
17 import shutil
17 import shutil
18 import tempfile
19
18
20 from .i18n import _
19 from .i18n import _
21 from .node import nullid
20 from .node import nullid
@@ -270,7 +269,7 b' class bundlerepository(localrepo.localre'
270 try:
269 try:
271 localrepo.localrepository.__init__(self, ui, repopath)
270 localrepo.localrepository.__init__(self, ui, repopath)
272 except error.RepoError:
271 except error.RepoError:
273 self._tempparent = tempfile.mkdtemp()
272 self._tempparent = pycompat.mkdtemp()
274 localrepo.instance(ui, self._tempparent, 1)
273 localrepo.instance(ui, self._tempparent, 1)
275 localrepo.localrepository.__init__(self, ui, self._tempparent)
274 localrepo.localrepository.__init__(self, ui, self._tempparent)
276 self.ui.setconfig('phases', 'publish', False, 'bundlerepo')
275 self.ui.setconfig('phases', 'publish', False, 'bundlerepo')
@@ -157,9 +157,7 b' cleanup:'
157 PyBuffer_Release(&bb);
157 PyBuffer_Release(&bb);
158 free(al);
158 free(al);
159 free(bl);
159 free(bl);
160 if (l.next) {
160 bdiff_freehunks(l.next);
161 bdiff_freehunks(l.next);
162 }
163 return result;
161 return result;
164 }
162 }
165
163
@@ -713,7 +713,7 b' void dirs_module_init(PyObject *mod);'
713 void manifest_module_init(PyObject *mod);
713 void manifest_module_init(PyObject *mod);
714 void revlog_module_init(PyObject *mod);
714 void revlog_module_init(PyObject *mod);
715
715
716 static const int version = 4;
716 static const int version = 5;
717
717
718 static void module_init(PyObject *mod)
718 static void module_init(PyObject *mod)
719 {
719 {
@@ -474,7 +474,10 b' static Py_ssize_t basicencode(char *dest'
474 static const uint32_t twobytes[8] = {0, 0, 0x87fffffe};
474 static const uint32_t twobytes[8] = {0, 0, 0x87fffffe};
475
475
476 static const uint32_t onebyte[8] = {
476 static const uint32_t onebyte[8] = {
477 1, 0x2bff3bfa, 0x68000001, 0x2fffffff,
477 1,
478 0x2bff3bfa,
479 0x68000001,
480 0x2fffffff,
478 };
481 };
479
482
480 Py_ssize_t destlen = 0;
483 Py_ssize_t destlen = 0;
@@ -655,16 +658,10 b' static int sha1hash(char hash[20], const'
655 PyObject *shaobj, *hashobj;
658 PyObject *shaobj, *hashobj;
656
659
657 if (shafunc == NULL) {
660 if (shafunc == NULL) {
658 PyObject *hashlib, *name = PyBytes_FromString("hashlib");
661 PyObject *hashlib = PyImport_ImportModule("hashlib");
659
660 if (name == NULL)
661 return -1;
662
663 hashlib = PyImport_Import(name);
664 Py_DECREF(name);
665
666 if (hashlib == NULL) {
662 if (hashlib == NULL) {
667 PyErr_SetString(PyExc_ImportError, "hashlib");
663 PyErr_SetString(PyExc_ImportError,
664 "pathencode failed to find hashlib");
668 return -1;
665 return -1;
669 }
666 }
670 shafunc = PyObject_GetAttrString(hashlib, "sha1");
667 shafunc = PyObject_GetAttrString(hashlib, "sha1");
@@ -673,12 +670,12 b' static int sha1hash(char hash[20], const'
673 if (shafunc == NULL) {
670 if (shafunc == NULL) {
674 PyErr_SetString(PyExc_AttributeError,
671 PyErr_SetString(PyExc_AttributeError,
675 "module 'hashlib' has no "
672 "module 'hashlib' has no "
676 "attribute 'sha1'");
673 "attribute 'sha1' in pathencode");
677 return -1;
674 return -1;
678 }
675 }
679 }
676 }
680
677
681 shaobj = PyObject_CallFunction(shafunc, "s#", str, len);
678 shaobj = PyObject_CallFunction(shafunc, PY23("s#", "y#"), str, len);
682
679
683 if (shaobj == NULL)
680 if (shaobj == NULL)
684 return -1;
681 return -1;
@@ -248,6 +248,20 b' static const char *index_node(indexObjec'
248 return data ? data + 32 : NULL;
248 return data ? data + 32 : NULL;
249 }
249 }
250
250
251 /*
252 * Return the 20-byte SHA of the node corresponding to the given rev. The
253 * rev is assumed to be existing. If not, an exception is set.
254 */
255 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
256 {
257 const char *node = index_node(self, pos);
258 if (node == NULL) {
259 PyErr_Format(PyExc_IndexError, "could not access rev %d",
260 (int)pos);
261 }
262 return node;
263 }
264
251 static int nt_insert(indexObject *self, const char *node, int rev);
265 static int nt_insert(indexObject *self, const char *node, int rev);
252
266
253 static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
267 static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
@@ -1052,10 +1066,12 b' static int nt_insert(indexObject *self, '
1052 return 0;
1066 return 0;
1053 }
1067 }
1054 if (v < 0) {
1068 if (v < 0) {
1055 const char *oldnode = index_node(self, -(v + 1));
1069 const char *oldnode = index_node_existing(self, -(v + 1));
1056 int noff;
1070 int noff;
1057
1071
1058 if (!oldnode || !memcmp(oldnode, node, 20)) {
1072 if (oldnode == NULL)
1073 return -1;
1074 if (!memcmp(oldnode, node, 20)) {
1059 n->children[k] = -rev - 1;
1075 n->children[k] = -rev - 1;
1060 return 0;
1076 return 0;
1061 }
1077 }
@@ -1135,9 +1151,9 b' static int index_find_node(indexObject *'
1135 */
1151 */
1136 if (self->ntmisses++ < 4) {
1152 if (self->ntmisses++ < 4) {
1137 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1153 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1138 const char *n = index_node(self, rev);
1154 const char *n = index_node_existing(self, rev);
1139 if (n == NULL)
1155 if (n == NULL)
1140 return -2;
1156 return -3;
1141 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1157 if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
1142 if (nt_insert(self, n, rev) == -1)
1158 if (nt_insert(self, n, rev) == -1)
1143 return -3;
1159 return -3;
@@ -1146,11 +1162,9 b' static int index_find_node(indexObject *'
1146 }
1162 }
1147 } else {
1163 } else {
1148 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1164 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1149 const char *n = index_node(self, rev);
1165 const char *n = index_node_existing(self, rev);
1150 if (n == NULL) {
1166 if (n == NULL)
1151 self->ntrev = rev + 1;
1167 return -3;
1152 return -2;
1153 }
1154 if (nt_insert(self, n, rev) == -1) {
1168 if (nt_insert(self, n, rev) == -1) {
1155 self->ntrev = rev + 1;
1169 self->ntrev = rev + 1;
1156 return -3;
1170 return -3;
@@ -1216,27 +1230,84 b' static PyObject *index_getitem(indexObje'
1216 return NULL;
1230 return NULL;
1217 }
1231 }
1218
1232
1233 /*
1234 * Fully populate the radix tree.
1235 */
1236 static int nt_populate(indexObject *self) {
1237 int rev;
1238 if (self->ntrev > 0) {
1239 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1240 const char *n = index_node_existing(self, rev);
1241 if (n == NULL)
1242 return -1;
1243 if (nt_insert(self, n, rev) == -1)
1244 return -1;
1245 }
1246 self->ntrev = -1;
1247 }
1248 return 0;
1249 }
1250
1219 static int nt_partialmatch(indexObject *self, const char *node,
1251 static int nt_partialmatch(indexObject *self, const char *node,
1220 Py_ssize_t nodelen)
1252 Py_ssize_t nodelen)
1221 {
1253 {
1222 int rev;
1254 if (nt_init(self) == -1)
1255 return -3;
1256 if (nt_populate(self) == -1)
1257 return -3;
1258
1259 return nt_find(self, node, nodelen, 1);
1260 }
1261
1262 /*
1263 * Find the length of the shortest unique prefix of node.
1264 *
1265 * Return values:
1266 *
1267 * -3: error (exception set)
1268 * -2: not found (no exception set)
1269 * rest: length of shortest prefix
1270 */
1271 static int nt_shortest(indexObject *self, const char *node)
1272 {
1273 int level, off;
1223
1274
1224 if (nt_init(self) == -1)
1275 if (nt_init(self) == -1)
1225 return -3;
1276 return -3;
1277 if (nt_populate(self) == -1)
1278 return -3;
1226
1279
1227 if (self->ntrev > 0) {
1280 for (level = off = 0; level < 40; level++) {
1228 /* ensure that the radix tree is fully populated */
1281 int k, v;
1229 for (rev = self->ntrev - 1; rev >= 0; rev--) {
1282 nodetree *n = &self->nt[off];
1230 const char *n = index_node(self, rev);
1283 k = nt_level(node, level);
1284 v = n->children[k];
1285 if (v < 0) {
1286 const char *n;
1287 v = -(v + 1);
1288 n = index_node_existing(self, v);
1231 if (n == NULL)
1289 if (n == NULL)
1290 return -3;
1291 if (memcmp(node, n, 20) != 0)
1292 /*
1293 * Found a unique prefix, but it wasn't for the
1294 * requested node (i.e the requested node does
1295 * not exist).
1296 */
1232 return -2;
1297 return -2;
1233 if (nt_insert(self, n, rev) == -1)
1298 return level + 1;
1234 return -3;
1235 }
1299 }
1236 self->ntrev = rev;
1300 if (v == 0)
1301 return -2;
1302 off = v;
1237 }
1303 }
1238
1304 /*
1239 return nt_find(self, node, nodelen, 1);
1305 * The node was still not unique after 40 hex digits, so this won't
1306 * happen. Also, if we get here, then there's a programming error in
1307 * this file that made us insert a node longer than 40 hex digits.
1308 */
1309 PyErr_SetString(PyExc_Exception, "broken node tree");
1310 return -3;
1240 }
1311 }
1241
1312
1242 static PyObject *index_partialmatch(indexObject *self, PyObject *args)
1313 static PyObject *index_partialmatch(indexObject *self, PyObject *args)
@@ -1249,7 +1320,7 b' static PyObject *index_partialmatch(inde'
1249 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
1320 if (!PyArg_ParseTuple(args, PY23("s#", "y#"), &node, &nodelen))
1250 return NULL;
1321 return NULL;
1251
1322
1252 if (nodelen < 4) {
1323 if (nodelen < 1) {
1253 PyErr_SetString(PyExc_ValueError, "key too short");
1324 PyErr_SetString(PyExc_ValueError, "key too short");
1254 return NULL;
1325 return NULL;
1255 }
1326 }
@@ -1280,15 +1351,36 b' static PyObject *index_partialmatch(inde'
1280 return PyBytes_FromStringAndSize(nullid, 20);
1351 return PyBytes_FromStringAndSize(nullid, 20);
1281 }
1352 }
1282
1353
1283 fullnode = index_node(self, rev);
1354 fullnode = index_node_existing(self, rev);
1284 if (fullnode == NULL) {
1355 if (fullnode == NULL) {
1285 PyErr_Format(PyExc_IndexError,
1286 "could not access rev %d", rev);
1287 return NULL;
1356 return NULL;
1288 }
1357 }
1289 return PyBytes_FromStringAndSize(fullnode, 20);
1358 return PyBytes_FromStringAndSize(fullnode, 20);
1290 }
1359 }
1291
1360
1361 static PyObject *index_shortest(indexObject *self, PyObject *args)
1362 {
1363 Py_ssize_t nodelen;
1364 PyObject *val;
1365 char *node;
1366 int length;
1367
1368 if (!PyArg_ParseTuple(args, "O", &val))
1369 return NULL;
1370 if (node_check(val, &node, &nodelen) == -1)
1371 return NULL;
1372
1373 self->ntlookups++;
1374 length = nt_shortest(self, node);
1375 if (length == -3)
1376 return NULL;
1377 if (length == -2) {
1378 raise_revlog_error();
1379 return NULL;
1380 }
1381 return PyInt_FromLong(length);
1382 }
1383
1292 static PyObject *index_m_get(indexObject *self, PyObject *args)
1384 static PyObject *index_m_get(indexObject *self, PyObject *args)
1293 {
1385 {
1294 Py_ssize_t nodelen;
1386 Py_ssize_t nodelen;
@@ -1758,10 +1850,11 b' static int index_slice_del(indexObject *'
1758 Py_ssize_t i;
1850 Py_ssize_t i;
1759
1851
1760 for (i = start + 1; i < self->length - 1; i++) {
1852 for (i = start + 1; i < self->length - 1; i++) {
1761 const char *node = index_node(self, i);
1853 const char *node = index_node_existing(self, i);
1854 if (node == NULL)
1855 return -1;
1762
1856
1763 if (node)
1857 nt_insert(self, node, -1);
1764 nt_insert(self, node, -1);
1765 }
1858 }
1766 if (self->added)
1859 if (self->added)
1767 nt_invalidate_added(self, 0);
1860 nt_invalidate_added(self, 0);
@@ -1977,6 +2070,8 b' static PyMethodDef index_methods[] = {'
1977 "insert an index entry"},
2070 "insert an index entry"},
1978 {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
2071 {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
1979 "match a potentially ambiguous node ID"},
2072 "match a potentially ambiguous node ID"},
2073 {"shortest", (PyCFunction)index_shortest, METH_VARARGS,
2074 "find length of shortest hex nodeid of a binary ID"},
1980 {"stats", (PyCFunction)index_stats, METH_NOARGS,
2075 {"stats", (PyCFunction)index_stats, METH_NOARGS,
1981 "stats for the index"},
2076 "stats for the index"},
1982 {NULL} /* Sentinel */
2077 {NULL} /* Sentinel */
@@ -9,7 +9,6 b' from __future__ import absolute_import'
9
9
10 import os
10 import os
11 import struct
11 import struct
12 import tempfile
13 import weakref
12 import weakref
14
13
15 from .i18n import _
14 from .i18n import _
@@ -80,7 +79,7 b' def writechunks(ui, chunks, filename, vf'
80 # small (4k is common on Linux).
79 # small (4k is common on Linux).
81 fh = open(filename, "wb", 131072)
80 fh = open(filename, "wb", 131072)
82 else:
81 else:
83 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
82 fd, filename = pycompat.mkstemp(prefix="hg-bundle-", suffix=".hg")
84 fh = os.fdopen(fd, r"wb")
83 fh = os.fdopen(fd, r"wb")
85 cleanup = filename
84 cleanup = filename
86 for c in chunks:
85 for c in chunks:
@@ -238,18 +237,16 b' class cg1unpacker(object):'
238 pos = next
237 pos = next
239 yield closechunk()
238 yield closechunk()
240
239
241 def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
240 def _unpackmanifests(self, repo, revmap, trp, prog):
242 # We know that we'll never have more manifests than we had
241 self.callback = prog.increment
243 # changesets.
244 self.callback = prog(_('manifests'), numchanges)
245 # no need to check for empty manifest group here:
242 # no need to check for empty manifest group here:
246 # if the result of the merge of 1 and 2 is the same in 3 and 4,
243 # if the result of the merge of 1 and 2 is the same in 3 and 4,
247 # no new manifest will be created and the manifest group will
244 # no new manifest will be created and the manifest group will
248 # be empty during the pull
245 # be empty during the pull
249 self.manifestheader()
246 self.manifestheader()
250 deltas = self.deltaiter()
247 deltas = self.deltaiter()
251 repo.manifestlog._revlog.addgroup(deltas, revmap, trp)
248 repo.manifestlog.addgroup(deltas, revmap, trp)
252 repo.ui.progress(_('manifests'), None)
249 prog.complete()
253 self.callback = None
250 self.callback = None
254
251
255 def apply(self, repo, tr, srctype, url, targetphase=phases.draft,
252 def apply(self, repo, tr, srctype, url, targetphase=phases.draft,
@@ -294,16 +291,9 b' class cg1unpacker(object):'
294 # pull off the changeset group
291 # pull off the changeset group
295 repo.ui.status(_("adding changesets\n"))
292 repo.ui.status(_("adding changesets\n"))
296 clstart = len(cl)
293 clstart = len(cl)
297 class prog(object):
294 progress = repo.ui.makeprogress(_('changesets'), unit=_('chunks'),
298 def __init__(self, step, total):
295 total=expectedtotal)
299 self._step = step
296 self.callback = progress.increment
300 self._total = total
301 self._count = 1
302 def __call__(self):
303 repo.ui.progress(self._step, self._count, unit=_('chunks'),
304 total=self._total)
305 self._count += 1
306 self.callback = prog(_('changesets'), expectedtotal)
307
297
308 efiles = set()
298 efiles = set()
309 def onchangelog(cl, node):
299 def onchangelog(cl, node):
@@ -319,12 +309,16 b' class cg1unpacker(object):'
319 config='warn-empty-changegroup')
309 config='warn-empty-changegroup')
320 clend = len(cl)
310 clend = len(cl)
321 changesets = clend - clstart
311 changesets = clend - clstart
322 repo.ui.progress(_('changesets'), None)
312 progress.complete()
323 self.callback = None
313 self.callback = None
324
314
325 # pull off the manifest group
315 # pull off the manifest group
326 repo.ui.status(_("adding manifests\n"))
316 repo.ui.status(_("adding manifests\n"))
327 self._unpackmanifests(repo, revmap, trp, prog, changesets)
317 # We know that we'll never have more manifests than we had
318 # changesets.
319 progress = repo.ui.makeprogress(_('manifests'), unit=_('chunks'),
320 total=changesets)
321 self._unpackmanifests(repo, revmap, trp, progress)
328
322
329 needfiles = {}
323 needfiles = {}
330 if repo.ui.configbool('server', 'validate'):
324 if repo.ui.configbool('server', 'validate'):
@@ -476,9 +470,8 b' class cg3unpacker(cg2unpacker):'
476 node, p1, p2, deltabase, cs, flags = headertuple
470 node, p1, p2, deltabase, cs, flags = headertuple
477 return node, p1, p2, deltabase, cs, flags
471 return node, p1, p2, deltabase, cs, flags
478
472
479 def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
473 def _unpackmanifests(self, repo, revmap, trp, prog):
480 super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog,
474 super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog)
481 numchanges)
482 for chunkdata in iter(self.filelogheader, {}):
475 for chunkdata in iter(self.filelogheader, {}):
483 # If we get here, there are directory manifests in the changegroup
476 # If we get here, there are directory manifests in the changegroup
484 d = chunkdata["filename"]
477 d = chunkdata["filename"]
@@ -523,7 +516,6 b' class cg1packer(object):'
523 reorder = stringutil.parsebool(reorder)
516 reorder = stringutil.parsebool(reorder)
524 self._repo = repo
517 self._repo = repo
525 self._reorder = reorder
518 self._reorder = reorder
526 self._progress = repo.ui.progress
527 if self._repo.ui.verbose and not self._repo.ui.debugflag:
519 if self._repo.ui.verbose and not self._repo.ui.debugflag:
528 self._verbosenote = self._repo.ui.note
520 self._verbosenote = self._repo.ui.note
529 else:
521 else:
@@ -572,18 +564,20 b' class cg1packer(object):'
572 revs.insert(0, p)
564 revs.insert(0, p)
573
565
574 # build deltas
566 # build deltas
575 total = len(revs) - 1
567 progress = None
576 msgbundling = _('bundling')
568 if units is not None:
569 progress = self._repo.ui.makeprogress(_('bundling'), unit=units,
570 total=(len(revs) - 1))
577 for r in xrange(len(revs) - 1):
571 for r in xrange(len(revs) - 1):
578 if units is not None:
572 if progress:
579 self._progress(msgbundling, r + 1, unit=units, total=total)
573 progress.update(r + 1)
580 prev, curr = revs[r], revs[r + 1]
574 prev, curr = revs[r], revs[r + 1]
581 linknode = lookup(revlog.node(curr))
575 linknode = lookup(revlog.node(curr))
582 for c in self.revchunk(revlog, curr, prev, linknode):
576 for c in self.revchunk(revlog, curr, prev, linknode):
583 yield c
577 yield c
584
578
585 if units is not None:
579 if progress:
586 self._progress(msgbundling, None)
580 progress.complete()
587 yield self.close()
581 yield self.close()
588
582
589 # filter any nodes that claim to be part of the known set
583 # filter any nodes that claim to be part of the known set
@@ -749,12 +743,8 b' class cg1packer(object):'
749 # The 'source' parameter is useful for extensions
743 # The 'source' parameter is useful for extensions
750 def generatefiles(self, changedfiles, linknodes, commonrevs, source):
744 def generatefiles(self, changedfiles, linknodes, commonrevs, source):
751 repo = self._repo
745 repo = self._repo
752 progress = self._progress
746 progress = repo.ui.makeprogress(_('bundling'), unit=_('files'),
753 msgbundling = _('bundling')
747 total=len(changedfiles))
754
755 total = len(changedfiles)
756 # for progress output
757 msgfiles = _('files')
758 for i, fname in enumerate(sorted(changedfiles)):
748 for i, fname in enumerate(sorted(changedfiles)):
759 filerevlog = repo.file(fname)
749 filerevlog = repo.file(fname)
760 if not filerevlog:
750 if not filerevlog:
@@ -769,8 +759,7 b' class cg1packer(object):'
769
759
770 filenodes = self.prune(filerevlog, linkrevnodes, commonrevs)
760 filenodes = self.prune(filerevlog, linkrevnodes, commonrevs)
771 if filenodes:
761 if filenodes:
772 progress(msgbundling, i + 1, item=fname, unit=msgfiles,
762 progress.update(i + 1, item=fname)
773 total=total)
774 h = self.fileheader(fname)
763 h = self.fileheader(fname)
775 size = len(h)
764 size = len(h)
776 yield h
765 yield h
@@ -778,7 +767,7 b' class cg1packer(object):'
778 size += len(chunk)
767 size += len(chunk)
779 yield chunk
768 yield chunk
780 self._verbosenote(_('%8.i %s\n') % (size, fname))
769 self._verbosenote(_('%8.i %s\n') % (size, fname))
781 progress(msgbundling, None)
770 progress.complete()
782
771
783 def deltaparent(self, revlog, rev, p1, p2, prev):
772 def deltaparent(self, revlog, rev, p1, p2, prev):
784 if not revlog.candelta(prev, rev):
773 if not revlog.candelta(prev, rev):
@@ -982,12 +971,13 b' def makestream(repo, outgoing, version, '
982 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
971 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
983 revisions = 0
972 revisions = 0
984 files = 0
973 files = 0
974 progress = repo.ui.makeprogress(_('files'), unit=_('files'),
975 total=expectedfiles)
985 for chunkdata in iter(source.filelogheader, {}):
976 for chunkdata in iter(source.filelogheader, {}):
986 files += 1
977 files += 1
987 f = chunkdata["filename"]
978 f = chunkdata["filename"]
988 repo.ui.debug("adding %s revisions\n" % f)
979 repo.ui.debug("adding %s revisions\n" % f)
989 repo.ui.progress(_('files'), files, unit=_('files'),
980 progress.increment()
990 total=expectedfiles)
991 fl = repo.file(f)
981 fl = repo.file(f)
992 o = len(fl)
982 o = len(fl)
993 try:
983 try:
@@ -1008,7 +998,7 b' def _addchangegroupfiles(repo, source, r'
1008 _("received spurious file revlog entry"))
998 _("received spurious file revlog entry"))
1009 if not needs:
999 if not needs:
1010 del needfiles[f]
1000 del needfiles[f]
1011 repo.ui.progress(_('files'), None)
1001 progress.complete()
1012
1002
1013 for f, needs in needfiles.iteritems():
1003 for f, needs in needfiles.iteritems():
1014 fl = repo.file(f)
1004 fl = repo.file(f)
@@ -10,7 +10,6 b' from __future__ import absolute_import'
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import tempfile
14
13
15 from .i18n import _
14 from .i18n import _
16 from .node import (
15 from .node import (
@@ -36,8 +35,8 b' from . import ('
36 obsolete,
35 obsolete,
37 patch,
36 patch,
38 pathutil,
37 pathutil,
38 phases,
39 pycompat,
39 pycompat,
40 registrar,
41 revlog,
40 revlog,
42 rewriteutil,
41 rewriteutil,
43 scmutil,
42 scmutil,
@@ -203,17 +202,21 b' def setupwrapcolorwrite(ui):'
203 return oldwrite
202 return oldwrite
204
203
205 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
204 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
206 if usecurses:
205 try:
207 if testfile:
206 if usecurses:
208 recordfn = crecordmod.testdecorator(testfile,
207 if testfile:
209 crecordmod.testchunkselector)
208 recordfn = crecordmod.testdecorator(
210 else:
209 testfile, crecordmod.testchunkselector)
211 recordfn = crecordmod.chunkselector
210 else:
212
211 recordfn = crecordmod.chunkselector
213 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
212
214
213 return crecordmod.filterpatch(ui, originalhunks, recordfn,
215 else:
214 operation)
216 return patch.filterpatch(ui, originalhunks, operation)
215 except crecordmod.fallbackerror as e:
216 ui.warn('%s\n' % e.message)
217 ui.warn(_('falling back to text mode\n'))
218
219 return patch.filterpatch(ui, originalhunks, operation)
217
220
218 def recordfilter(ui, originalhunks, operation=None):
221 def recordfilter(ui, originalhunks, operation=None):
219 """ Prompts the user to filter the originalhunks and return a list of
222 """ Prompts the user to filter the originalhunks and return a list of
@@ -331,7 +334,7 b' def dorecord(ui, repo, commitfunc, cmdsu'
331 try:
334 try:
332 # backup continues
335 # backup continues
333 for f in tobackup:
336 for f in tobackup:
334 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
337 fd, tmpname = pycompat.mkstemp(prefix=f.replace('/', '_') + '.',
335 dir=backupdir)
338 dir=backupdir)
336 os.close(fd)
339 os.close(fd)
337 ui.debug('backup %r as %r\n' % (f, tmpname))
340 ui.debug('backup %r as %r\n' % (f, tmpname))
@@ -419,7 +422,7 b' class dirnode(object):'
419 Represent a directory in user working copy with information required for
422 Represent a directory in user working copy with information required for
420 the purpose of tersing its status.
423 the purpose of tersing its status.
421
424
422 path is the path to the directory
425 path is the path to the directory, without a trailing '/'
423
426
424 statuses is a set of statuses of all files in this directory (this includes
427 statuses is a set of statuses of all files in this directory (this includes
425 all the files in all the subdirectories too)
428 all the files in all the subdirectories too)
@@ -456,7 +459,7 b' class dirnode(object):'
456
459
457 # does the dirnode object for subdir exists
460 # does the dirnode object for subdir exists
458 if subdir not in self.subdirs:
461 if subdir not in self.subdirs:
459 subdirpath = os.path.join(self.path, subdir)
462 subdirpath = pathutil.join(self.path, subdir)
460 self.subdirs[subdir] = dirnode(subdirpath)
463 self.subdirs[subdir] = dirnode(subdirpath)
461
464
462 # try adding the file in subdir
465 # try adding the file in subdir
@@ -471,7 +474,7 b' class dirnode(object):'
471 def iterfilepaths(self):
474 def iterfilepaths(self):
472 """Yield (status, path) for files directly under this directory."""
475 """Yield (status, path) for files directly under this directory."""
473 for f, st in self.files:
476 for f, st in self.files:
474 yield st, os.path.join(self.path, f)
477 yield st, pathutil.join(self.path, f)
475
478
476 def tersewalk(self, terseargs):
479 def tersewalk(self, terseargs):
477 """
480 """
@@ -485,7 +488,7 b' class dirnode(object):'
485
488
486 1) All the files in the directory (including all the files in its
489 1) All the files in the directory (including all the files in its
487 subdirectories) share the same status and the user has asked us to terse
490 subdirectories) share the same status and the user has asked us to terse
488 that status. -> yield (status, dirpath)
491 that status. -> yield (status, dirpath). dirpath will end in '/'.
489
492
490 2) Otherwise, we do following:
493 2) Otherwise, we do following:
491
494
@@ -502,7 +505,7 b' class dirnode(object):'
502 # Making sure we terse only when the status abbreviation is
505 # Making sure we terse only when the status abbreviation is
503 # passed as terse argument
506 # passed as terse argument
504 if onlyst in terseargs:
507 if onlyst in terseargs:
505 yield onlyst, self.path + pycompat.ossep
508 yield onlyst, self.path + '/'
506 return
509 return
507
510
508 # add the files to status list
511 # add the files to status list
@@ -591,8 +594,8 b' To mark files as resolved: hg resolve -'
591 return _commentlines(msg)
594 return _commentlines(msg)
592
595
593 def _helpmessage(continuecmd, abortcmd):
596 def _helpmessage(continuecmd, abortcmd):
594 msg = _('To continue: %s\n'
597 msg = _('To continue: %s\n'
595 'To abort: %s') % (continuecmd, abortcmd)
598 'To abort: %s') % (continuecmd, abortcmd)
596 return _commentlines(msg)
599 return _commentlines(msg)
597
600
598 def _rebasemsg():
601 def _rebasemsg():
@@ -606,7 +609,7 b' def _unshelvemsg():'
606
609
607 def _updatecleanmsg(dest=None):
610 def _updatecleanmsg(dest=None):
608 warning = _('warning: this will discard uncommitted changes')
611 warning = _('warning: this will discard uncommitted changes')
609 return 'hg update --clean %s (%s)' % (dest or '.', warning)
612 return 'hg update --clean %s (%s)' % (dest or '.', warning)
610
613
611 def _graftmsg():
614 def _graftmsg():
612 # tweakdefaults requires `update` to have a rev hence the `.`
615 # tweakdefaults requires `update` to have a rev hence the `.`
@@ -633,7 +636,7 b' STATES = ('
633 ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
636 ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
634 ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
637 ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
635 ('graft', fileexistspredicate('graftstate'), _graftmsg),
638 ('graft', fileexistspredicate('graftstate'), _graftmsg),
636 ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
639 ('unshelve', fileexistspredicate('shelvedstate'), _unshelvemsg),
637 ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
640 ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
638 # The merge state is part of a list that will be iterated over.
641 # The merge state is part of a list that will be iterated over.
639 # They need to be last because some of the other unfinished states may also
642 # They need to be last because some of the other unfinished states may also
@@ -787,16 +790,12 b' def changebranch(ui, repo, revs, label):'
787 extra=extra,
790 extra=extra,
788 branch=label)
791 branch=label)
789
792
790 commitphase = ctx.phase()
793 newnode = repo.commitctx(mc)
791 overrides = {('phases', 'new-commit'): commitphase}
792 with repo.ui.configoverride(overrides, 'branch-change'):
793 newnode = repo.commitctx(mc)
794
795 replacements[ctx.node()] = (newnode,)
794 replacements[ctx.node()] = (newnode,)
796 ui.debug('new node id is %s\n' % hex(newnode))
795 ui.debug('new node id is %s\n' % hex(newnode))
797
796
798 # create obsmarkers and move bookmarks
797 # create obsmarkers and move bookmarks
799 scmutil.cleanupnodes(repo, replacements, 'branch-change')
798 scmutil.cleanupnodes(repo, replacements, 'branch-change', fixphase=True)
800
799
801 # move the working copy too
800 # move the working copy too
802 wctx = repo[None]
801 wctx = repo[None]
@@ -1248,7 +1247,8 b' def copy(ui, repo, pats, opts, rename=Fa'
1248 dryrun=dryrun, cwd=cwd)
1247 dryrun=dryrun, cwd=cwd)
1249 if rename and not dryrun:
1248 if rename and not dryrun:
1250 if not after and srcexists and not samefile:
1249 if not after and srcexists and not samefile:
1251 repo.wvfs.unlinkpath(abssrc)
1250 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
1251 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1252 wctx.forget([abssrc])
1252 wctx.forget([abssrc])
1253
1253
1254 # pat: ossep
1254 # pat: ossep
@@ -1685,7 +1685,7 b' def showmarker(fm, marker, index=None):'
1685 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1685 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1686 meta = marker.metadata().copy()
1686 meta = marker.metadata().copy()
1687 meta.pop('date', None)
1687 meta.pop('date', None)
1688 smeta = util.rapply(pycompat.maybebytestr, meta)
1688 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
1689 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1689 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1690 fm.plain('\n')
1690 fm.plain('\n')
1691
1691
@@ -1884,10 +1884,14 b' def walkchangerevs(repo, match, opts, pr'
1884 yielding each context, the iterator will first call the prepare
1884 yielding each context, the iterator will first call the prepare
1885 function on each context in the window in forward order.'''
1885 function on each context in the window in forward order.'''
1886
1886
1887 allfiles = opts.get('all_files')
1887 follow = opts.get('follow') or opts.get('follow_first')
1888 follow = opts.get('follow') or opts.get('follow_first')
1888 revs = _walkrevs(repo, opts)
1889 revs = _walkrevs(repo, opts)
1889 if not revs:
1890 if not revs:
1890 return []
1891 return []
1892 if allfiles and len(revs) > 1:
1893 raise error.Abort(_("multiple revisions not supported with "
1894 "--all-files"))
1891 wanted = set()
1895 wanted = set()
1892 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1896 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1893 fncache = {}
1897 fncache = {}
@@ -1993,7 +1997,11 b' def walkchangerevs(repo, match, opts, pr'
1993 ctx = change(rev)
1997 ctx = change(rev)
1994 if not fns:
1998 if not fns:
1995 def fns_generator():
1999 def fns_generator():
1996 for f in ctx.files():
2000 if allfiles:
2001 fiter = iter(ctx)
2002 else:
2003 fiter = ctx.files()
2004 for f in fiter:
1997 if match(f):
2005 if match(f):
1998 yield f
2006 yield f
1999 fns = fns_generator()
2007 fns = fns_generator()
@@ -2137,15 +2145,13 b' def forget(ui, repo, match, prefix, expl'
2137 return bad, forgot
2145 return bad, forgot
2138
2146
2139 def files(ui, ctx, m, fm, fmt, subrepos):
2147 def files(ui, ctx, m, fm, fmt, subrepos):
2140 rev = ctx.rev()
2141 ret = 1
2148 ret = 1
2142 ds = ctx.repo().dirstate
2149
2143
2150 needsfctx = ui.verbose or {'size', 'flags'} & fm.datahint()
2144 for f in ctx.matches(m):
2151 for f in ctx.matches(m):
2145 if rev is None and ds[f] == 'r':
2146 continue
2147 fm.startitem()
2152 fm.startitem()
2148 if ui.verbose:
2153 fm.context(ctx=ctx)
2154 if needsfctx:
2149 fc = ctx[f]
2155 fc = ctx[f]
2150 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2156 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2151 fm.data(abspath=f)
2157 fm.data(abspath=f)
@@ -2181,13 +2187,12 b' def remove(ui, repo, m, prefix, after, f'
2181 warn = False
2187 warn = False
2182
2188
2183 subs = sorted(wctx.substate)
2189 subs = sorted(wctx.substate)
2184 total = len(subs)
2190 progress = ui.makeprogress(_('searching'), total=len(subs),
2185 count = 0
2191 unit=_('subrepos'))
2186 for subpath in subs:
2192 for subpath in subs:
2187 count += 1
2188 submatch = matchmod.subdirmatcher(subpath, m)
2193 submatch = matchmod.subdirmatcher(subpath, m)
2189 if subrepos or m.exact(subpath) or any(submatch.files()):
2194 if subrepos or m.exact(subpath) or any(submatch.files()):
2190 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2195 progress.increment()
2191 sub = wctx.sub(subpath)
2196 sub = wctx.sub(subpath)
2192 try:
2197 try:
2193 if sub.removefiles(submatch, prefix, after, force, subrepos,
2198 if sub.removefiles(submatch, prefix, after, force, subrepos,
@@ -2196,13 +2201,13 b' def remove(ui, repo, m, prefix, after, f'
2196 except error.LookupError:
2201 except error.LookupError:
2197 warnings.append(_("skipping missing subrepository: %s\n")
2202 warnings.append(_("skipping missing subrepository: %s\n")
2198 % join(subpath))
2203 % join(subpath))
2199 ui.progress(_('searching'), None)
2204 progress.complete()
2200
2205
2201 # warn about failure to delete explicit files/dirs
2206 # warn about failure to delete explicit files/dirs
2202 deleteddirs = util.dirs(deleted)
2207 deleteddirs = util.dirs(deleted)
2203 files = m.files()
2208 files = m.files()
2204 total = len(files)
2209 progress = ui.makeprogress(_('deleting'), total=len(files),
2205 count = 0
2210 unit=_('files'))
2206 for f in files:
2211 for f in files:
2207 def insubrepo():
2212 def insubrepo():
2208 for subpath in wctx.substate:
2213 for subpath in wctx.substate:
@@ -2210,8 +2215,7 b' def remove(ui, repo, m, prefix, after, f'
2210 return True
2215 return True
2211 return False
2216 return False
2212
2217
2213 count += 1
2218 progress.increment()
2214 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2215 isdir = f in deleteddirs or wctx.hasdir(f)
2219 isdir = f in deleteddirs or wctx.hasdir(f)
2216 if (f in repo.dirstate or isdir or f == '.'
2220 if (f in repo.dirstate or isdir or f == '.'
2217 or insubrepo() or f in subs):
2221 or insubrepo() or f in subs):
@@ -2226,50 +2230,47 b' def remove(ui, repo, m, prefix, after, f'
2226 % m.rel(f))
2230 % m.rel(f))
2227 # missing files will generate a warning elsewhere
2231 # missing files will generate a warning elsewhere
2228 ret = 1
2232 ret = 1
2229 ui.progress(_('deleting'), None)
2233 progress.complete()
2230
2234
2231 if force:
2235 if force:
2232 list = modified + deleted + clean + added
2236 list = modified + deleted + clean + added
2233 elif after:
2237 elif after:
2234 list = deleted
2238 list = deleted
2235 remaining = modified + added + clean
2239 remaining = modified + added + clean
2236 total = len(remaining)
2240 progress = ui.makeprogress(_('skipping'), total=len(remaining),
2237 count = 0
2241 unit=_('files'))
2238 for f in remaining:
2242 for f in remaining:
2239 count += 1
2243 progress.increment()
2240 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2241 if ui.verbose or (f in files):
2244 if ui.verbose or (f in files):
2242 warnings.append(_('not removing %s: file still exists\n')
2245 warnings.append(_('not removing %s: file still exists\n')
2243 % m.rel(f))
2246 % m.rel(f))
2244 ret = 1
2247 ret = 1
2245 ui.progress(_('skipping'), None)
2248 progress.complete()
2246 else:
2249 else:
2247 list = deleted + clean
2250 list = deleted + clean
2248 total = len(modified) + len(added)
2251 progress = ui.makeprogress(_('skipping'),
2249 count = 0
2252 total=(len(modified) + len(added)),
2253 unit=_('files'))
2250 for f in modified:
2254 for f in modified:
2251 count += 1
2255 progress.increment()
2252 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2253 warnings.append(_('not removing %s: file is modified (use -f'
2256 warnings.append(_('not removing %s: file is modified (use -f'
2254 ' to force removal)\n') % m.rel(f))
2257 ' to force removal)\n') % m.rel(f))
2255 ret = 1
2258 ret = 1
2256 for f in added:
2259 for f in added:
2257 count += 1
2260 progress.increment()
2258 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2259 warnings.append(_("not removing %s: file has been marked for add"
2261 warnings.append(_("not removing %s: file has been marked for add"
2260 " (use 'hg forget' to undo add)\n") % m.rel(f))
2262 " (use 'hg forget' to undo add)\n") % m.rel(f))
2261 ret = 1
2263 ret = 1
2262 ui.progress(_('skipping'), None)
2264 progress.complete()
2263
2265
2264 list = sorted(list)
2266 list = sorted(list)
2265 total = len(list)
2267 progress = ui.makeprogress(_('deleting'), total=len(list),
2266 count = 0
2268 unit=_('files'))
2267 for f in list:
2269 for f in list:
2268 count += 1
2269 if ui.verbose or not m.exact(f):
2270 if ui.verbose or not m.exact(f):
2270 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2271 progress.increment()
2271 ui.status(_('removing %s\n') % m.rel(f))
2272 ui.status(_('removing %s\n') % m.rel(f))
2272 ui.progress(_('deleting'), None)
2273 progress.complete()
2273
2274
2274 if not dryrun:
2275 if not dryrun:
2275 with repo.wlock():
2276 with repo.wlock():
@@ -2277,7 +2278,9 b' def remove(ui, repo, m, prefix, after, f'
2277 for f in list:
2278 for f in list:
2278 if f in added:
2279 if f in added:
2279 continue # we never unlink added files on remove
2280 continue # we never unlink added files on remove
2280 repo.wvfs.unlinkpath(f, ignoremissing=True)
2281 rmdir = repo.ui.configbool('experimental',
2282 'removeemptydirs')
2283 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2281 repo[None].forget(list)
2284 repo[None].forget(list)
2282
2285
2283 if warn:
2286 if warn:
@@ -2295,6 +2298,7 b' def _updatecatformatter(fm, ctx, matcher'
2295 if decode:
2298 if decode:
2296 data = ctx.repo().wwritedata(path, data)
2299 data = ctx.repo().wwritedata(path, data)
2297 fm.startitem()
2300 fm.startitem()
2301 fm.context(ctx=ctx)
2298 fm.write('data', '%s', data)
2302 fm.write('data', '%s', data)
2299 fm.data(abspath=path, path=matcher.rel(path))
2303 fm.data(abspath=path, path=matcher.rel(path))
2300
2304
@@ -2541,21 +2545,19 b' def amend(ui, repo, old, extra, pats, op'
2541 # This not what we expect from amend.
2545 # This not what we expect from amend.
2542 return old.node()
2546 return old.node()
2543
2547
2548 commitphase = None
2544 if opts.get('secret'):
2549 if opts.get('secret'):
2545 commitphase = 'secret'
2550 commitphase = phases.secret
2546 else:
2551 newid = repo.commitctx(new)
2547 commitphase = old.phase()
2548 overrides = {('phases', 'new-commit'): commitphase}
2549 with ui.configoverride(overrides, 'amend'):
2550 newid = repo.commitctx(new)
2551
2552
2552 # Reroute the working copy parent to the new changeset
2553 # Reroute the working copy parent to the new changeset
2553 repo.setparents(newid, nullid)
2554 repo.setparents(newid, nullid)
2554 mapping = {old.node(): (newid,)}
2555 mapping = {old.node(): (newid,)}
2555 obsmetadata = None
2556 obsmetadata = None
2556 if opts.get('note'):
2557 if opts.get('note'):
2557 obsmetadata = {'note': opts['note']}
2558 obsmetadata = {'note': encoding.fromlocal(opts['note'])}
2558 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
2559 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata,
2560 fixphase=True, targetphase=commitphase)
2559
2561
2560 # Fixing the dirstate because localrepo.commitctx does not update
2562 # Fixing the dirstate because localrepo.commitctx does not update
2561 # it. This is rather convenient because we did not need to update
2563 # it. This is rather convenient because we did not need to update
@@ -3002,12 +3004,6 b' def revert(ui, repo, ctx, parents, *pats'
3002
3004
3003 if not opts.get('dry_run'):
3005 if not opts.get('dry_run'):
3004 needdata = ('revert', 'add', 'undelete')
3006 needdata = ('revert', 'add', 'undelete')
3005 if _revertprefetch is not _revertprefetchstub:
3006 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, "
3007 "add a callback to 'scmutil.fileprefetchhooks'",
3008 '4.6', stacklevel=1)
3009 _revertprefetch(repo, ctx,
3010 *[actions[name][0] for name in needdata])
3011 oplist = [actions[name][0] for name in needdata]
3007 oplist = [actions[name][0] for name in needdata]
3012 prefetch = scmutil.prefetchfiles
3008 prefetch = scmutil.prefetchfiles
3013 matchfiles = scmutil.matchfiles
3009 matchfiles = scmutil.matchfiles
@@ -3026,12 +3022,6 b' def revert(ui, repo, ctx, parents, *pats'
3026 raise error.Abort("subrepository '%s' does not exist in %s!"
3022 raise error.Abort("subrepository '%s' does not exist in %s!"
3027 % (sub, short(ctx.node())))
3023 % (sub, short(ctx.node())))
3028
3024
3029 def _revertprefetchstub(repo, ctx, *files):
3030 """Stub method for detecting extension wrapping of _revertprefetch(), to
3031 issue a deprecation warning."""
3032
3033 _revertprefetch = _revertprefetchstub
3034
3035 def _performrevert(repo, parents, ctx, actions, interactive=False,
3025 def _performrevert(repo, parents, ctx, actions, interactive=False,
3036 tobackup=None):
3026 tobackup=None):
3037 """function that actually perform all the actions computed for revert
3027 """function that actually perform all the actions computed for revert
@@ -3051,7 +3041,8 b' def _performrevert(repo, parents, ctx, a'
3051
3041
3052 def doremove(f):
3042 def doremove(f):
3053 try:
3043 try:
3054 repo.wvfs.unlinkpath(f)
3044 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
3045 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3055 except OSError:
3046 except OSError:
3056 pass
3047 pass
3057 repo.dirstate.remove(f)
3048 repo.dirstate.remove(f)
@@ -3168,12 +3159,6 b' def _performrevert(repo, parents, ctx, a'
3168 if f in copied:
3159 if f in copied:
3169 repo.dirstate.copy(copied[f], f)
3160 repo.dirstate.copy(copied[f], f)
3170
3161
3171 class command(registrar.command):
3172 """deprecated: used registrar.command instead"""
3173 def _doregister(self, func, name, *args, **kwargs):
3174 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3175 return super(command, self)._doregister(func, name, *args, **kwargs)
3176
3177 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3162 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3178 # commands.outgoing. "missing" is "missing" of the result of
3163 # commands.outgoing. "missing" is "missing" of the result of
3179 # "findcommonoutgoing()"
3164 # "findcommonoutgoing()"
@@ -3198,7 +3183,7 b' summaryremotehooks = util.hooks()'
3198 # (state file, clearable, allowcommit, error, hint)
3183 # (state file, clearable, allowcommit, error, hint)
3199 unfinishedstates = [
3184 unfinishedstates = [
3200 ('graftstate', True, False, _('graft in progress'),
3185 ('graftstate', True, False, _('graft in progress'),
3201 _("use 'hg graft --continue' or 'hg update' to abort")),
3186 _("use 'hg graft --continue' or 'hg graft --stop' to stop")),
3202 ('updatestate', True, False, _('last update was interrupted'),
3187 ('updatestate', True, False, _('last update was interrupted'),
3203 _("use 'hg update' to get a consistent checkout"))
3188 _("use 'hg update' to get a consistent checkout"))
3204 ]
3189 ]
@@ -3285,23 +3270,3 b' def wrongtooltocontinue(repo, task):'
3285 if after[1]:
3270 if after[1]:
3286 hint = after[0]
3271 hint = after[0]
3287 raise error.Abort(_('no %s in progress') % task, hint=hint)
3272 raise error.Abort(_('no %s in progress') % task, hint=hint)
3288
3289 class changeset_printer(logcmdutil.changesetprinter):
3290
3291 def __init__(self, ui, *args, **kwargs):
3292 msg = ("'cmdutil.changeset_printer' is deprecated, "
3293 "use 'logcmdutil.logcmdutil'")
3294 ui.deprecwarn(msg, "4.6")
3295 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3296
3297 def displaygraph(ui, *args, **kwargs):
3298 msg = ("'cmdutil.displaygraph' is deprecated, "
3299 "use 'logcmdutil.displaygraph'")
3300 ui.deprecwarn(msg, "4.6")
3301 return logcmdutil.displaygraph(ui, *args, **kwargs)
3302
3303 def show_changeset(ui, *args, **kwargs):
3304 msg = ("'cmdutil.show_changeset' is deprecated, "
3305 "use 'logcmdutil.changesetdisplayer'")
3306 ui.deprecwarn(msg, "4.6")
3307 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
This diff has been collapsed as it changes many lines, (528 lines changed) Show them Hide them
@@ -40,7 +40,6 b' from . import ('
40 hbisect,
40 hbisect,
41 help,
41 help,
42 hg,
42 hg,
43 lock as lockmod,
44 logcmdutil,
43 logcmdutil,
45 merge as mergemod,
44 merge as mergemod,
46 obsolete,
45 obsolete,
@@ -50,10 +49,12 b' from . import ('
50 pycompat,
49 pycompat,
51 rcutil,
50 rcutil,
52 registrar,
51 registrar,
52 repair,
53 revsetlang,
53 revsetlang,
54 rewriteutil,
54 rewriteutil,
55 scmutil,
55 scmutil,
56 server,
56 server,
57 state as statemod,
57 streamclone,
58 streamclone,
58 tags as tagsmod,
59 tags as tagsmod,
59 templatekw,
60 templatekw,
@@ -63,12 +64,9 b' from . import ('
63 )
64 )
64 from .utils import (
65 from .utils import (
65 dateutil,
66 dateutil,
66 procutil,
67 stringutil,
67 stringutil,
68 )
68 )
69
69
70 release = lockmod.release
71
72 table = {}
70 table = {}
73 table.update(debugcommandsmod.command._table)
71 table.update(debugcommandsmod.command._table)
74
72
@@ -335,13 +333,13 b' def annotate(ui, repo, *pats, **opts):'
335 formatrev = formathex = pycompat.bytestr
333 formatrev = formathex = pycompat.bytestr
336
334
337 opmap = [('user', ' ', lambda x: x.fctx.user(), ui.shortuser),
335 opmap = [('user', ' ', lambda x: x.fctx.user(), ui.shortuser),
338 ('number', ' ', lambda x: x.fctx.rev(), formatrev),
336 ('rev', ' ', lambda x: x.fctx.rev(), formatrev),
339 ('changeset', ' ', lambda x: hexfn(x.fctx.node()), formathex),
337 ('node', ' ', lambda x: hexfn(x.fctx.node()), formathex),
340 ('date', ' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
338 ('date', ' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
341 ('file', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
339 ('file', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
342 ('line_number', ':', lambda x: x.lineno, pycompat.bytestr),
340 ('line_number', ':', lambda x: x.lineno, pycompat.bytestr),
343 ]
341 ]
344 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
342 opnamemap = {'rev': 'number', 'node': 'changeset'}
345
343
346 if (not opts.get('user') and not opts.get('changeset')
344 if (not opts.get('user') and not opts.get('changeset')
347 and not opts.get('date') and not opts.get('file')):
345 and not opts.get('date') and not opts.get('file')):
@@ -359,11 +357,12 b' def annotate(ui, repo, *pats, **opts):'
359 else:
357 else:
360 def makefunc(get, fmt):
358 def makefunc(get, fmt):
361 return get
359 return get
362 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
360 datahint = rootfm.datahint()
363 if opts.get(op)]
361 funcmap = [(makefunc(get, fmt), sep) for fn, sep, get, fmt in opmap
362 if opts.get(opnamemap.get(fn, fn)) or fn in datahint]
364 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
363 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
365 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
364 fields = ' '.join(fn for fn, sep, get, fmt in opmap
366 if opts.get(op))
365 if opts.get(opnamemap.get(fn, fn)) or fn in datahint)
367
366
368 def bad(x, y):
367 def bad(x, y):
369 raise error.Abort("%s: %s" % (x, y))
368 raise error.Abort("%s: %s" % (x, y))
@@ -560,13 +559,8 b' def backout(ui, repo, node=None, rev=Non'
560 Returns 0 on success, 1 if nothing to backout or there are unresolved
559 Returns 0 on success, 1 if nothing to backout or there are unresolved
561 files.
560 files.
562 '''
561 '''
563 wlock = lock = None
562 with repo.wlock(), repo.lock():
564 try:
565 wlock = repo.wlock()
566 lock = repo.lock()
567 return _dobackout(ui, repo, node, rev, **opts)
563 return _dobackout(ui, repo, node, rev, **opts)
568 finally:
569 release(lock, wlock)
570
564
571 def _dobackout(ui, repo, node=None, rev=None, **opts):
565 def _dobackout(ui, repo, node=None, rev=None, **opts):
572 opts = pycompat.byteskwargs(opts)
566 opts = pycompat.byteskwargs(opts)
@@ -617,21 +611,16 b' def _dobackout(ui, repo, node=None, rev='
617 bheads = repo.branchheads(branch)
611 bheads = repo.branchheads(branch)
618 rctx = scmutil.revsingle(repo, hex(parent))
612 rctx = scmutil.revsingle(repo, hex(parent))
619 if not opts.get('merge') and op1 != node:
613 if not opts.get('merge') and op1 != node:
620 dsguard = dirstateguard.dirstateguard(repo, 'backout')
614 with dirstateguard.dirstateguard(repo, 'backout'):
621 try:
615 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
622 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
616 with ui.configoverride(overrides, 'backout'):
623 'backout')
617 stats = mergemod.update(repo, parent, True, True, node, False)
624 stats = mergemod.update(repo, parent, True, True, node, False)
625 repo.setparents(op1, op2)
618 repo.setparents(op1, op2)
626 dsguard.close()
619 hg._showstats(repo, stats)
627 hg._showstats(repo, stats)
620 if stats.unresolvedcount:
628 if stats.unresolvedcount:
621 repo.ui.status(_("use 'hg resolve' to retry unresolved "
629 repo.ui.status(_("use 'hg resolve' to retry unresolved "
622 "file merges\n"))
630 "file merges\n"))
623 return 1
631 return 1
632 finally:
633 ui.setconfig('ui', 'forcemerge', '', '')
634 lockmod.release(dsguard)
635 else:
624 else:
636 hg.clean(repo, node, show_stats=False)
625 hg.clean(repo, node, show_stats=False)
637 repo.dirstate.setbranch(branch)
626 repo.dirstate.setbranch(branch)
@@ -667,12 +656,9 b' def _dobackout(ui, repo, node=None, rev='
667 hg.clean(repo, op1, show_stats=False)
656 hg.clean(repo, op1, show_stats=False)
668 ui.status(_('merging with changeset %s\n')
657 ui.status(_('merging with changeset %s\n')
669 % nice(repo.changelog.tip()))
658 % nice(repo.changelog.tip()))
670 try:
659 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
671 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
660 with ui.configoverride(overrides, 'backout'):
672 'backout')
673 return hg.merge(repo, hex(repo.changelog.tip()))
661 return hg.merge(repo, hex(repo.changelog.tip()))
674 finally:
675 ui.setconfig('ui', 'forcemerge', '', '')
676 return 0
662 return 0
677
663
678 @command('bisect',
664 @command('bisect',
@@ -1234,7 +1220,7 b' def bundle(ui, repo, fname, dest=None, *'
1234 other = hg.peer(repo, opts, dest)
1220 other = hg.peer(repo, opts, dest)
1235 revs = [repo[r].hex() for r in revs]
1221 revs = [repo[r].hex() for r in revs]
1236 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1222 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1237 heads = revs and map(repo.lookup, revs) or revs
1223 heads = revs and pycompat.maplist(repo.lookup, revs) or revs
1238 outgoing = discovery.findcommonoutgoing(repo, other,
1224 outgoing = discovery.findcommonoutgoing(repo, other,
1239 onlyheads=heads,
1225 onlyheads=heads,
1240 force=opts.get('force'),
1226 force=opts.get('force'),
@@ -1536,13 +1522,8 b' def commit(ui, repo, *pats, **opts):'
1536
1522
1537 hg commit --amend --date now
1523 hg commit --amend --date now
1538 """
1524 """
1539 wlock = lock = None
1525 with repo.wlock(), repo.lock():
1540 try:
1541 wlock = repo.wlock()
1542 lock = repo.lock()
1543 return _docommit(ui, repo, *pats, **opts)
1526 return _docommit(ui, repo, *pats, **opts)
1544 finally:
1545 release(lock, wlock)
1546
1527
1547 def _docommit(ui, repo, *pats, **opts):
1528 def _docommit(ui, repo, *pats, **opts):
1548 if opts.get(r'interactive'):
1529 if opts.get(r'interactive'):
@@ -1895,7 +1876,9 b' def diff(ui, repo, *pats, **opts):'
1895 root=opts.get('root'))
1876 root=opts.get('root'))
1896
1877
1897 @command('^export',
1878 @command('^export',
1898 [('o', 'output', '',
1879 [('B', 'bookmark', '',
1880 _('export changes only reachable by given bookmark')),
1881 ('o', 'output', '',
1899 _('print output to file with formatted name'), _('FORMAT')),
1882 _('print output to file with formatted name'), _('FORMAT')),
1900 ('', 'switch-parent', None, _('diff against the second parent')),
1883 ('', 'switch-parent', None, _('diff against the second parent')),
1901 ('r', 'rev', [], _('revisions to export'), _('REV')),
1884 ('r', 'rev', [], _('revisions to export'), _('REV')),
@@ -1938,6 +1921,9 b' def export(ui, repo, *changesets, **opts'
1938 of files it detects as binary. With -a, export will generate a
1921 of files it detects as binary. With -a, export will generate a
1939 diff anyway, probably with undesirable results.
1922 diff anyway, probably with undesirable results.
1940
1923
1924 With -B/--bookmark changesets reachable by the given bookmark are
1925 selected.
1926
1941 Use the -g/--git option to generate diffs in the git extended diff
1927 Use the -g/--git option to generate diffs in the git extended diff
1942 format. See :hg:`help diffs` for more information.
1928 format. See :hg:`help diffs` for more information.
1943
1929
@@ -1966,11 +1952,24 b' def export(ui, repo, *changesets, **opts'
1966 Returns 0 on success.
1952 Returns 0 on success.
1967 """
1953 """
1968 opts = pycompat.byteskwargs(opts)
1954 opts = pycompat.byteskwargs(opts)
1955 bookmark = opts.get('bookmark')
1969 changesets += tuple(opts.get('rev', []))
1956 changesets += tuple(opts.get('rev', []))
1970 if not changesets:
1957
1971 changesets = ['.']
1958 if bookmark and changesets:
1972 repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
1959 raise error.Abort(_("-r and -B are mutually exclusive"))
1973 revs = scmutil.revrange(repo, changesets)
1960
1961 if bookmark:
1962 if bookmark not in repo._bookmarks:
1963 raise error.Abort(_("bookmark '%s' not found") % bookmark)
1964
1965 revs = scmutil.bookmarkrevs(repo, bookmark)
1966 else:
1967 if not changesets:
1968 changesets = ['.']
1969
1970 repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
1971 revs = scmutil.revrange(repo, changesets)
1972
1974 if not revs:
1973 if not revs:
1975 raise error.Abort(_("export requires at least one changeset"))
1974 raise error.Abort(_("export requires at least one changeset"))
1976 if len(revs) > 1:
1975 if len(revs) > 1:
@@ -2108,8 +2107,12 b' def forget(ui, repo, *pats, **opts):'
2108 'graft',
2107 'graft',
2109 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2108 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2110 ('c', 'continue', False, _('resume interrupted graft')),
2109 ('c', 'continue', False, _('resume interrupted graft')),
2110 ('', 'stop', False, _('stop interrupted graft')),
2111 ('', 'abort', False, _('abort interrupted graft')),
2111 ('e', 'edit', False, _('invoke editor on commit messages')),
2112 ('e', 'edit', False, _('invoke editor on commit messages')),
2112 ('', 'log', None, _('append graft info to log message')),
2113 ('', 'log', None, _('append graft info to log message')),
2114 ('', 'no-commit', None,
2115 _("don't commit, just apply the changes in working directory")),
2113 ('f', 'force', False, _('force graft')),
2116 ('f', 'force', False, _('force graft')),
2114 ('D', 'currentdate', False,
2117 ('D', 'currentdate', False,
2115 _('record the current date as commit date')),
2118 _('record the current date as commit date')),
@@ -2143,10 +2146,7 b' def graft(ui, repo, *revs, **opts):'
2143 Once all conflicts are addressed, the graft process can be
2146 Once all conflicts are addressed, the graft process can be
2144 continued with the -c/--continue option.
2147 continued with the -c/--continue option.
2145
2148
2146 .. note::
2149 The -c/--continue option reapplies all the earlier options.
2147
2148 The -c/--continue option does not reapply earlier options, except
2149 for --force.
2150
2150
2151 .. container:: verbose
2151 .. container:: verbose
2152
2152
@@ -2188,6 +2188,10 b' def _dograft(ui, repo, *revs, **opts):'
2188
2188
2189 revs = list(revs)
2189 revs = list(revs)
2190 revs.extend(opts.get('rev'))
2190 revs.extend(opts.get('rev'))
2191 # a dict of data to be stored in state file
2192 statedata = {}
2193 # list of new nodes created by ongoing graft
2194 statedata['newnodes'] = []
2191
2195
2192 if not opts.get('user') and opts.get('currentuser'):
2196 if not opts.get('user') and opts.get('currentuser'):
2193 opts['user'] = ui.username()
2197 opts['user'] = ui.username()
@@ -2198,17 +2202,62 b' def _dograft(ui, repo, *revs, **opts):'
2198 **pycompat.strkwargs(opts))
2202 **pycompat.strkwargs(opts))
2199
2203
2200 cont = False
2204 cont = False
2201 if opts.get('continue'):
2205 if opts.get('no_commit'):
2206 if opts.get('edit'):
2207 raise error.Abort(_("cannot specify --no-commit and "
2208 "--edit together"))
2209 if opts.get('currentuser'):
2210 raise error.Abort(_("cannot specify --no-commit and "
2211 "--currentuser together"))
2212 if opts.get('currentdate'):
2213 raise error.Abort(_("cannot specify --no-commit and "
2214 "--currentdate together"))
2215 if opts.get('log'):
2216 raise error.Abort(_("cannot specify --no-commit and "
2217 "--log together"))
2218
2219 graftstate = statemod.cmdstate(repo, 'graftstate')
2220
2221 if opts.get('stop'):
2222 if opts.get('continue'):
2223 raise error.Abort(_("cannot use '--continue' and "
2224 "'--stop' together"))
2225 if opts.get('abort'):
2226 raise error.Abort(_("cannot use '--abort' and '--stop' together"))
2227
2228 if any((opts.get('edit'), opts.get('log'), opts.get('user'),
2229 opts.get('date'), opts.get('currentdate'),
2230 opts.get('currentuser'), opts.get('rev'))):
2231 raise error.Abort(_("cannot specify any other flag with '--stop'"))
2232 return _stopgraft(ui, repo, graftstate)
2233 elif opts.get('abort'):
2234 if opts.get('continue'):
2235 raise error.Abort(_("cannot use '--continue' and "
2236 "'--abort' together"))
2237 if any((opts.get('edit'), opts.get('log'), opts.get('user'),
2238 opts.get('date'), opts.get('currentdate'),
2239 opts.get('currentuser'), opts.get('rev'))):
2240 raise error.Abort(_("cannot specify any other flag with '--abort'"))
2241
2242 return _abortgraft(ui, repo, graftstate)
2243 elif opts.get('continue'):
2202 cont = True
2244 cont = True
2203 if revs:
2245 if revs:
2204 raise error.Abort(_("can't specify --continue and revisions"))
2246 raise error.Abort(_("can't specify --continue and revisions"))
2205 # read in unfinished revisions
2247 # read in unfinished revisions
2206 try:
2248 if graftstate.exists():
2207 nodes = repo.vfs.read('graftstate').splitlines()
2249 statedata = _readgraftstate(repo, graftstate)
2250 if statedata.get('date'):
2251 opts['date'] = statedata['date']
2252 if statedata.get('user'):
2253 opts['user'] = statedata['user']
2254 if statedata.get('log'):
2255 opts['log'] = True
2256 if statedata.get('no_commit'):
2257 opts['no_commit'] = statedata.get('no_commit')
2258 nodes = statedata['nodes']
2208 revs = [repo[node].rev() for node in nodes]
2259 revs = [repo[node].rev() for node in nodes]
2209 except IOError as inst:
2260 else:
2210 if inst.errno != errno.ENOENT:
2211 raise
2212 cmdutil.wrongtooltocontinue(repo, _('graft'))
2261 cmdutil.wrongtooltocontinue(repo, _('graft'))
2213 else:
2262 else:
2214 if not revs:
2263 if not revs:
@@ -2292,6 +2341,8 b' def _dograft(ui, repo, *revs, **opts):'
2292 if not revs:
2341 if not revs:
2293 return -1
2342 return -1
2294
2343
2344 if opts.get('no_commit'):
2345 statedata['no_commit'] = True
2295 for pos, ctx in enumerate(repo.set("%ld", revs)):
2346 for pos, ctx in enumerate(repo.set("%ld", revs)):
2296 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
2347 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
2297 ctx.description().split('\n', 1)[0])
2348 ctx.description().split('\n', 1)[0])
@@ -2312,60 +2363,134 b' def _dograft(ui, repo, *revs, **opts):'
2312 user = ctx.user()
2363 user = ctx.user()
2313 if opts.get('user'):
2364 if opts.get('user'):
2314 user = opts['user']
2365 user = opts['user']
2366 statedata['user'] = user
2315 date = ctx.date()
2367 date = ctx.date()
2316 if opts.get('date'):
2368 if opts.get('date'):
2317 date = opts['date']
2369 date = opts['date']
2370 statedata['date'] = date
2318 message = ctx.description()
2371 message = ctx.description()
2319 if opts.get('log'):
2372 if opts.get('log'):
2320 message += '\n(grafted from %s)' % ctx.hex()
2373 message += '\n(grafted from %s)' % ctx.hex()
2374 statedata['log'] = True
2321
2375
2322 # we don't merge the first commit when continuing
2376 # we don't merge the first commit when continuing
2323 if not cont:
2377 if not cont:
2324 # perform the graft merge with p1(rev) as 'ancestor'
2378 # perform the graft merge with p1(rev) as 'ancestor'
2325 try:
2379 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
2326 # ui.forcemerge is an internal variable, do not document
2380 with ui.configoverride(overrides, 'graft'):
2327 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
2381 stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'graft'])
2328 'graft')
2329 stats = mergemod.graft(repo, ctx, ctx.p1(),
2330 ['local', 'graft'])
2331 finally:
2332 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
2333 # report any conflicts
2382 # report any conflicts
2334 if stats.unresolvedcount > 0:
2383 if stats.unresolvedcount > 0:
2335 # write out state for --continue
2384 # write out state for --continue
2336 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2385 nodes = [repo[rev].hex() for rev in revs[pos:]]
2337 repo.vfs.write('graftstate', ''.join(nodelines))
2386 statedata['nodes'] = nodes
2338 extra = ''
2387 stateversion = 1
2339 if opts.get('user'):
2388 graftstate.save(stateversion, statedata)
2340 extra += ' --user %s' % procutil.shellquote(opts['user'])
2389 hint = _("use 'hg resolve' and 'hg graft --continue'")
2341 if opts.get('date'):
2342 extra += ' --date %s' % procutil.shellquote(opts['date'])
2343 if opts.get('log'):
2344 extra += ' --log'
2345 hint=_("use 'hg resolve' and 'hg graft --continue%s'") % extra
2346 raise error.Abort(
2390 raise error.Abort(
2347 _("unresolved conflicts, can't continue"),
2391 _("unresolved conflicts, can't continue"),
2348 hint=hint)
2392 hint=hint)
2349 else:
2393 else:
2350 cont = False
2394 cont = False
2351
2395
2352 # commit
2396 # commit if --no-commit is false
2353 node = repo.commit(text=message, user=user,
2397 if not opts.get('no_commit'):
2354 date=date, extra=extra, editor=editor)
2398 node = repo.commit(text=message, user=user, date=date, extra=extra,
2355 if node is None:
2399 editor=editor)
2356 ui.warn(
2400 if node is None:
2357 _('note: graft of %d:%s created no changes to commit\n') %
2401 ui.warn(
2358 (ctx.rev(), ctx))
2402 _('note: graft of %d:%s created no changes to commit\n') %
2403 (ctx.rev(), ctx))
2404 # checking that newnodes exist because old state files won't have it
2405 elif statedata.get('newnodes') is not None:
2406 statedata['newnodes'].append(node)
2359
2407
2360 # remove state when we complete successfully
2408 # remove state when we complete successfully
2361 if not opts.get('dry_run'):
2409 if not opts.get('dry_run'):
2362 repo.vfs.unlinkpath('graftstate', ignoremissing=True)
2410 graftstate.delete()
2363
2411
2412 return 0
2413
2414 def _abortgraft(ui, repo, graftstate):
2415 """abort the interrupted graft and rollbacks to the state before interrupted
2416 graft"""
2417 if not graftstate.exists():
2418 raise error.Abort(_("no interrupted graft to abort"))
2419 statedata = _readgraftstate(repo, graftstate)
2420 newnodes = statedata.get('newnodes')
2421 if newnodes is None:
2422 # and old graft state which does not have all the data required to abort
2423 # the graft
2424 raise error.Abort(_("cannot abort using an old graftstate"))
2425
2426 # changeset from which graft operation was started
2427 startctx = None
2428 if len(newnodes) > 0:
2429 startctx = repo[newnodes[0]].p1()
2430 else:
2431 startctx = repo['.']
2432 # whether to strip or not
2433 cleanup = False
2434 if newnodes:
2435 newnodes = [repo[r].rev() for r in newnodes]
2436 cleanup = True
2437 # checking that none of the newnodes turned public or is public
2438 immutable = [c for c in newnodes if not repo[c].mutable()]
2439 if immutable:
2440 repo.ui.warn(_("cannot clean up public changesets %s\n")
2441 % ', '.join(bytes(repo[r]) for r in immutable),
2442 hint=_("see 'hg help phases' for details"))
2443 cleanup = False
2444
2445 # checking that no new nodes are created on top of grafted revs
2446 desc = set(repo.changelog.descendants(newnodes))
2447 if desc - set(newnodes):
2448 repo.ui.warn(_("new changesets detected on destination "
2449 "branch, can't strip\n"))
2450 cleanup = False
2451
2452 if cleanup:
2453 with repo.wlock(), repo.lock():
2454 hg.updaterepo(repo, startctx.node(), overwrite=True)
2455 # stripping the new nodes created
2456 strippoints = [c.node() for c in repo.set("roots(%ld)",
2457 newnodes)]
2458 repair.strip(repo.ui, repo, strippoints, backup=False)
2459
2460 if not cleanup:
2461 # we don't update to the startnode if we can't strip
2462 startctx = repo['.']
2463 hg.updaterepo(repo, startctx.node(), overwrite=True)
2464
2465 ui.status(_("graft aborted\n"))
2466 ui.status(_("working directory is now at %s\n") % startctx.hex()[:12])
2467 graftstate.delete()
2468 return 0
2469
2470 def _readgraftstate(repo, graftstate):
2471 """read the graft state file and return a dict of the data stored in it"""
2472 try:
2473 return graftstate.read()
2474 except error.CorruptedState:
2475 nodes = repo.vfs.read('graftstate').splitlines()
2476 return {'nodes': nodes}
2477
2478 def _stopgraft(ui, repo, graftstate):
2479 """stop the interrupted graft"""
2480 if not graftstate.exists():
2481 raise error.Abort(_("no interrupted graft found"))
2482 pctx = repo['.']
2483 hg.updaterepo(repo, pctx.node(), overwrite=True)
2484 graftstate.delete()
2485 ui.status(_("stopped the interrupted graft\n"))
2486 ui.status(_("working directory is now at %s\n") % pctx.hex()[:12])
2364 return 0
2487 return 0
2365
2488
2366 @command('grep',
2489 @command('grep',
2367 [('0', 'print0', None, _('end fields with NUL')),
2490 [('0', 'print0', None, _('end fields with NUL')),
2368 ('', 'all', None, _('print all revisions that match')),
2491 ('', 'all', None, _('print all revisions that match (DEPRECATED) ')),
2492 ('', 'diff', None, _('print all revisions when the term was introduced '
2493 'or removed')),
2369 ('a', 'text', None, _('treat all files as text')),
2494 ('a', 'text', None, _('treat all files as text')),
2370 ('f', 'follow', None,
2495 ('f', 'follow', None,
2371 _('follow changeset history,'
2496 _('follow changeset history,'
@@ -2376,6 +2501,8 b' def _dograft(ui, repo, *revs, **opts):'
2376 ('n', 'line-number', None, _('print matching line numbers')),
2501 ('n', 'line-number', None, _('print matching line numbers')),
2377 ('r', 'rev', [],
2502 ('r', 'rev', [],
2378 _('only search files changed within revision range'), _('REV')),
2503 _('only search files changed within revision range'), _('REV')),
2504 ('', 'all-files', None,
2505 _('include all files in the changeset while grepping (EXPERIMENTAL)')),
2379 ('u', 'user', None, _('list the author (long with -v)')),
2506 ('u', 'user', None, _('list the author (long with -v)')),
2380 ('d', 'date', None, _('list the date (short with -q)')),
2507 ('d', 'date', None, _('list the date (short with -q)')),
2381 ] + formatteropts + walkopts,
2508 ] + formatteropts + walkopts,
@@ -2392,7 +2519,7 b' def grep(ui, repo, pattern, *pats, **opt'
2392 file in which it finds a match. To get it to print every revision
2519 file in which it finds a match. To get it to print every revision
2393 that contains a change in match status ("-" for a match that becomes
2520 that contains a change in match status ("-" for a match that becomes
2394 a non-match, or "+" for a non-match that becomes a match), use the
2521 a non-match, or "+" for a non-match that becomes a match), use the
2395 --all flag.
2522 --diff flag.
2396
2523
2397 PATTERN can be any Python (roughly Perl-compatible) regular
2524 PATTERN can be any Python (roughly Perl-compatible) regular
2398 expression.
2525 expression.
@@ -2404,6 +2531,17 b' def grep(ui, repo, pattern, *pats, **opt'
2404 Returns 0 if a match is found, 1 otherwise.
2531 Returns 0 if a match is found, 1 otherwise.
2405 """
2532 """
2406 opts = pycompat.byteskwargs(opts)
2533 opts = pycompat.byteskwargs(opts)
2534 diff = opts.get('all') or opts.get('diff')
2535 if diff and opts.get('all_files'):
2536 raise error.Abort(_('--diff and --all-files are mutually exclusive'))
2537 # TODO: remove "not opts.get('rev')" if --all-files -rMULTIREV gets working
2538 if opts.get('all_files') is None and not opts.get('rev') and not diff:
2539 # experimental config: commands.grep.all-files
2540 opts['all_files'] = ui.configbool('commands', 'grep.all-files')
2541 plaingrep = opts.get('all_files') and not opts.get('rev')
2542 if plaingrep:
2543 opts['rev'] = ['wdir()']
2544
2407 reflags = re.M
2545 reflags = re.M
2408 if opts.get('ignore_case'):
2546 if opts.get('ignore_case'):
2409 reflags |= re.I
2547 reflags |= re.I
@@ -2481,7 +2619,7 b' def grep(ui, repo, pattern, *pats, **opt'
2481 yield ('+', b[i])
2619 yield ('+', b[i])
2482
2620
2483 def display(fm, fn, ctx, pstates, states):
2621 def display(fm, fn, ctx, pstates, states):
2484 rev = ctx.rev()
2622 rev = scmutil.intrev(ctx)
2485 if fm.isplain():
2623 if fm.isplain():
2486 formatuser = ui.shortuser
2624 formatuser = ui.shortuser
2487 else:
2625 else:
@@ -2494,22 +2632,27 b' def grep(ui, repo, pattern, *pats, **opt'
2494 @util.cachefunc
2632 @util.cachefunc
2495 def binary():
2633 def binary():
2496 flog = getfile(fn)
2634 flog = getfile(fn)
2497 return stringutil.binary(flog.read(ctx.filenode(fn)))
2635 try:
2636 return stringutil.binary(flog.read(ctx.filenode(fn)))
2637 except error.WdirUnsupported:
2638 return ctx[fn].isbinary()
2498
2639
2499 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
2640 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
2500 if opts.get('all'):
2641 if diff:
2501 iter = difflinestates(pstates, states)
2642 iter = difflinestates(pstates, states)
2502 else:
2643 else:
2503 iter = [('', l) for l in states]
2644 iter = [('', l) for l in states]
2504 for change, l in iter:
2645 for change, l in iter:
2505 fm.startitem()
2646 fm.startitem()
2506 fm.data(node=fm.hexfunc(ctx.node()))
2647 fm.context(ctx=ctx)
2648 fm.data(node=fm.hexfunc(scmutil.binnode(ctx)))
2649
2507 cols = [
2650 cols = [
2508 ('filename', fn, True),
2651 ('filename', fn, True),
2509 ('rev', rev, True),
2652 ('rev', rev, not plaingrep),
2510 ('linenumber', l.linenum, opts.get('line_number')),
2653 ('linenumber', l.linenum, opts.get('line_number')),
2511 ]
2654 ]
2512 if opts.get('all'):
2655 if diff:
2513 cols.append(('change', change, True))
2656 cols.append(('change', change, True))
2514 cols.extend([
2657 cols.extend([
2515 ('user', formatuser(ctx.user()), opts.get('user')),
2658 ('user', formatuser(ctx.user()), opts.get('user')),
@@ -2569,8 +2712,10 b' def grep(ui, repo, pattern, *pats, **opt'
2569 fnode = ctx.filenode(fn)
2712 fnode = ctx.filenode(fn)
2570 except error.LookupError:
2713 except error.LookupError:
2571 continue
2714 continue
2572
2715 try:
2573 copied = flog.renamed(fnode)
2716 copied = flog.renamed(fnode)
2717 except error.WdirUnsupported:
2718 copied = ctx[fn].renamed()
2574 copy = follow and copied and copied[0]
2719 copy = follow and copied and copied[0]
2575 if copy:
2720 if copy:
2576 copies.setdefault(rev, {})[fn] = copy
2721 copies.setdefault(rev, {})[fn] = copy
@@ -2581,7 +2726,11 b' def grep(ui, repo, pattern, *pats, **opt'
2581 files.append(fn)
2726 files.append(fn)
2582
2727
2583 if fn not in matches[rev]:
2728 if fn not in matches[rev]:
2584 grepbody(fn, rev, flog.read(fnode))
2729 try:
2730 content = flog.read(fnode)
2731 except error.WdirUnsupported:
2732 content = ctx[fn].data()
2733 grepbody(fn, rev, content)
2585
2734
2586 pfn = copy or fn
2735 pfn = copy or fn
2587 if pfn not in matches[parent]:
2736 if pfn not in matches[parent]:
@@ -2607,7 +2756,7 b' def grep(ui, repo, pattern, *pats, **opt'
2607 if pstates or states:
2756 if pstates or states:
2608 r = display(fm, fn, ctx, pstates, states)
2757 r = display(fm, fn, ctx, pstates, states)
2609 found = found or r
2758 found = found or r
2610 if r and not opts.get('all'):
2759 if r and not diff:
2611 skip[fn] = True
2760 skip[fn] = True
2612 if copy:
2761 if copy:
2613 skip[copy] = True
2762 skip[copy] = True
@@ -3071,69 +3220,62 b' def import_(ui, repo, patch1=None, *patc'
3071 raise error.Abort(_('cannot use --exact with --prefix'))
3220 raise error.Abort(_('cannot use --exact with --prefix'))
3072
3221
3073 base = opts["base"]
3222 base = opts["base"]
3074 wlock = dsguard = lock = tr = None
3075 msgs = []
3223 msgs = []
3076 ret = 0
3224 ret = 0
3077
3225
3078
3226 with repo.wlock():
3079 try:
3080 wlock = repo.wlock()
3081
3082 if update:
3227 if update:
3083 cmdutil.checkunfinished(repo)
3228 cmdutil.checkunfinished(repo)
3084 if (exact or not opts.get('force')):
3229 if (exact or not opts.get('force')):
3085 cmdutil.bailifchanged(repo)
3230 cmdutil.bailifchanged(repo)
3086
3231
3087 if not opts.get('no_commit'):
3232 if not opts.get('no_commit'):
3088 lock = repo.lock()
3233 lock = repo.lock
3089 tr = repo.transaction('import')
3234 tr = lambda: repo.transaction('import')
3235 dsguard = util.nullcontextmanager
3090 else:
3236 else:
3091 dsguard = dirstateguard.dirstateguard(repo, 'import')
3237 lock = util.nullcontextmanager
3092 parents = repo[None].parents()
3238 tr = util.nullcontextmanager
3093 for patchurl in patches:
3239 dsguard = lambda: dirstateguard.dirstateguard(repo, 'import')
3094 if patchurl == '-':
3240 with lock(), tr(), dsguard():
3095 ui.status(_('applying patch from stdin\n'))
3241 parents = repo[None].parents()
3096 patchfile = ui.fin
3242 for patchurl in patches:
3097 patchurl = 'stdin' # for error message
3243 if patchurl == '-':
3098 else:
3244 ui.status(_('applying patch from stdin\n'))
3099 patchurl = os.path.join(base, patchurl)
3245 patchfile = ui.fin
3100 ui.status(_('applying %s\n') % patchurl)
3246 patchurl = 'stdin' # for error message
3101 patchfile = hg.openpath(ui, patchurl)
3102
3103 haspatch = False
3104 for hunk in patch.split(patchfile):
3105 with patch.extract(ui, hunk) as patchdata:
3106 msg, node, rej = cmdutil.tryimportone(ui, repo, patchdata,
3107 parents, opts,
3108 msgs, hg.clean)
3109 if msg:
3110 haspatch = True
3111 ui.note(msg + '\n')
3112 if update or exact:
3113 parents = repo[None].parents()
3114 else:
3247 else:
3115 parents = [repo[node]]
3248 patchurl = os.path.join(base, patchurl)
3116 if rej:
3249 ui.status(_('applying %s\n') % patchurl)
3117 ui.write_err(_("patch applied partially\n"))
3250 patchfile = hg.openpath(ui, patchurl)
3118 ui.write_err(_("(fix the .rej files and run "
3251
3119 "`hg commit --amend`)\n"))
3252 haspatch = False
3120 ret = 1
3253 for hunk in patch.split(patchfile):
3121 break
3254 with patch.extract(ui, hunk) as patchdata:
3122
3255 msg, node, rej = cmdutil.tryimportone(ui, repo,
3123 if not haspatch:
3256 patchdata,
3124 raise error.Abort(_('%s: no diffs found') % patchurl)
3257 parents, opts,
3125
3258 msgs, hg.clean)
3126 if tr:
3259 if msg:
3127 tr.close()
3260 haspatch = True
3128 if msgs:
3261 ui.note(msg + '\n')
3129 repo.savecommitmessage('\n* * *\n'.join(msgs))
3262 if update or exact:
3130 if dsguard:
3263 parents = repo[None].parents()
3131 dsguard.close()
3264 else:
3265 parents = [repo[node]]
3266 if rej:
3267 ui.write_err(_("patch applied partially\n"))
3268 ui.write_err(_("(fix the .rej files and run "
3269 "`hg commit --amend`)\n"))
3270 ret = 1
3271 break
3272
3273 if not haspatch:
3274 raise error.Abort(_('%s: no diffs found') % patchurl)
3275
3276 if msgs:
3277 repo.savecommitmessage('\n* * *\n'.join(msgs))
3132 return ret
3278 return ret
3133 finally:
3134 if tr:
3135 tr.release()
3136 release(lock, dsguard, wlock)
3137
3279
3138 @command('incoming|in',
3280 @command('incoming|in',
3139 [('f', 'force', None,
3281 [('f', 'force', None,
@@ -3291,7 +3433,13 b' def locate(ui, repo, *pats, **opts):'
3291 badfn=lambda x, y: False)
3433 badfn=lambda x, y: False)
3292
3434
3293 ui.pager('locate')
3435 ui.pager('locate')
3294 for abs in ctx.matches(m):
3436 if ctx.rev() is None:
3437 # When run on the working copy, "locate" includes removed files, so
3438 # we get the list of files from the dirstate.
3439 filesgen = sorted(repo.dirstate.matches(m))
3440 else:
3441 filesgen = ctx.matches(m)
3442 for abs in filesgen:
3295 if opts.get('fullpath'):
3443 if opts.get('fullpath'):
3296 ui.write(repo.wjoin(abs), end)
3444 ui.write(repo.wjoin(abs), end)
3297 else:
3445 else:
@@ -3545,6 +3693,7 b' def manifest(ui, repo, node=None, rev=No'
3545 ui.pager('manifest')
3693 ui.pager('manifest')
3546 for f in ctx:
3694 for f in ctx:
3547 fm.startitem()
3695 fm.startitem()
3696 fm.context(ctx=ctx)
3548 fl = ctx[f].flags()
3697 fl = ctx[f].flags()
3549 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3698 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3550 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
3699 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
@@ -3623,15 +3772,13 b' def merge(ui, repo, node=None, **opts):'
3623 displayer.close()
3772 displayer.close()
3624 return 0
3773 return 0
3625
3774
3626 try:
3775 # ui.forcemerge is an internal variable, do not document
3627 # ui.forcemerge is an internal variable, do not document
3776 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
3628 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
3777 with ui.configoverride(overrides, 'merge'):
3629 force = opts.get('force')
3778 force = opts.get('force')
3630 labels = ['working copy', 'merge rev']
3779 labels = ['working copy', 'merge rev']
3631 return hg.merge(repo, node, force=force, mergeforce=force,
3780 return hg.merge(repo, node, force=force, mergeforce=force,
3632 labels=labels, abort=abort)
3781 labels=labels, abort=abort)
3633 finally:
3634 ui.setconfig('ui', 'forcemerge', '', 'merge')
3635
3782
3636 @command('outgoing|out',
3783 @command('outgoing|out',
3637 [('f', 'force', None, _('run even when the destination is unrelated')),
3784 [('f', 'force', None, _('run even when the destination is unrelated')),
@@ -3679,6 +3826,13 b' def outgoing(ui, repo, dest=None, **opts'
3679
3826
3680 Returns 0 if there are outgoing changes, 1 otherwise.
3827 Returns 0 if there are outgoing changes, 1 otherwise.
3681 """
3828 """
3829 # hg._outgoing() needs to re-resolve the path in order to handle #branch
3830 # style URLs, so don't overwrite dest.
3831 path = ui.paths.getpath(dest, default=('default-push', 'default'))
3832 if not path:
3833 raise error.Abort(_('default repository not configured!'),
3834 hint=_("see 'hg help config.paths'"))
3835
3682 opts = pycompat.byteskwargs(opts)
3836 opts = pycompat.byteskwargs(opts)
3683 if opts.get('graph'):
3837 if opts.get('graph'):
3684 logcmdutil.checkunsupportedgraphflags([], opts)
3838 logcmdutil.checkunsupportedgraphflags([], opts)
@@ -3696,8 +3850,7 b' def outgoing(ui, repo, dest=None, **opts'
3696 return 0
3850 return 0
3697
3851
3698 if opts.get('bookmarks'):
3852 if opts.get('bookmarks'):
3699 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3853 dest = path.pushloc or path.loc
3700 dest, branches = hg.parseurl(dest, opts.get('branch'))
3701 other = hg.peer(repo, opts, dest)
3854 other = hg.peer(repo, opts, dest)
3702 if 'bookmarks' not in other.listkeys('namespaces'):
3855 if 'bookmarks' not in other.listkeys('namespaces'):
3703 ui.warn(_("remote doesn't support bookmarks\n"))
3856 ui.warn(_("remote doesn't support bookmarks\n"))
@@ -3706,7 +3859,7 b' def outgoing(ui, repo, dest=None, **opts'
3706 ui.pager('outgoing')
3859 ui.pager('outgoing')
3707 return bookmarks.outgoing(ui, repo, other)
3860 return bookmarks.outgoing(ui, repo, other)
3708
3861
3709 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
3862 repo._subtoppath = path.pushloc or path.loc
3710 try:
3863 try:
3711 return hg.outgoing(ui, repo, dest, opts)
3864 return hg.outgoing(ui, repo, dest, opts)
3712 finally:
3865 finally:
@@ -4391,7 +4544,8 b' def resolve(ui, repo, *pats, **opts):'
4391 ui.pager('resolve')
4544 ui.pager('resolve')
4392 fm = ui.formatter('resolve', opts)
4545 fm = ui.formatter('resolve', opts)
4393 ms = mergemod.mergestate.read(repo)
4546 ms = mergemod.mergestate.read(repo)
4394 m = scmutil.match(repo[None], pats, opts)
4547 wctx = repo[None]
4548 m = scmutil.match(wctx, pats, opts)
4395
4549
4396 # Labels and keys based on merge state. Unresolved path conflicts show
4550 # Labels and keys based on merge state. Unresolved path conflicts show
4397 # as 'P'. Resolved path conflicts show as 'R', the same as normal
4551 # as 'P'. Resolved path conflicts show as 'R', the same as normal
@@ -4411,6 +4565,7 b' def resolve(ui, repo, *pats, **opts):'
4411
4565
4412 label, key = mergestateinfo[ms[f]]
4566 label, key = mergestateinfo[ms[f]]
4413 fm.startitem()
4567 fm.startitem()
4568 fm.context(ctx=wctx)
4414 fm.condwrite(not nostatus, 'status', '%s ', key, label=label)
4569 fm.condwrite(not nostatus, 'status', '%s ', key, label=label)
4415 fm.write('path', '%s\n', f, label=label)
4570 fm.write('path', '%s\n', f, label=label)
4416 fm.end()
4571 fm.end()
@@ -4488,15 +4643,14 b' def resolve(ui, repo, *pats, **opts):'
4488
4643
4489 try:
4644 try:
4490 # preresolve file
4645 # preresolve file
4491 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4646 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
4492 'resolve')
4647 with ui.configoverride(overrides, 'resolve'):
4493 complete, r = ms.preresolve(f, wctx)
4648 complete, r = ms.preresolve(f, wctx)
4494 if not complete:
4649 if not complete:
4495 tocomplete.append(f)
4650 tocomplete.append(f)
4496 elif r:
4651 elif r:
4497 ret = 1
4652 ret = 1
4498 finally:
4653 finally:
4499 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4500 ms.commit()
4654 ms.commit()
4501
4655
4502 # replace filemerge's .orig file with our resolve file, but only
4656 # replace filemerge's .orig file with our resolve file, but only
@@ -4512,13 +4666,12 b' def resolve(ui, repo, *pats, **opts):'
4512 for f in tocomplete:
4666 for f in tocomplete:
4513 try:
4667 try:
4514 # resolve file
4668 # resolve file
4515 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4669 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
4516 'resolve')
4670 with ui.configoverride(overrides, 'resolve'):
4517 r = ms.resolve(f, wctx)
4671 r = ms.resolve(f, wctx)
4518 if r:
4672 if r:
4519 ret = 1
4673 ret = 1
4520 finally:
4674 finally:
4521 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4522 ms.commit()
4675 ms.commit()
4523
4676
4524 # replace filemerge's .orig file with our resolve file
4677 # replace filemerge's .orig file with our resolve file
@@ -4747,7 +4900,8 b' def root(ui, repo):'
4747 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4900 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4748 ('', 'style', '', _('template style to use'), _('STYLE')),
4901 ('', 'style', '', _('template style to use'), _('STYLE')),
4749 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4902 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4750 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
4903 ('', 'certificate', '', _('SSL certificate file'), _('FILE')),
4904 ('', 'print-url', None, _('start and print only the URL'))]
4751 + subrepoopts,
4905 + subrepoopts,
4752 _('[OPTION]...'),
4906 _('[OPTION]...'),
4753 optionalrepo=True)
4907 optionalrepo=True)
@@ -4779,6 +4933,8 b' def serve(ui, repo, **opts):'
4779 opts = pycompat.byteskwargs(opts)
4933 opts = pycompat.byteskwargs(opts)
4780 if opts["stdio"] and opts["cmdserver"]:
4934 if opts["stdio"] and opts["cmdserver"]:
4781 raise error.Abort(_("cannot use --stdio with --cmdserver"))
4935 raise error.Abort(_("cannot use --stdio with --cmdserver"))
4936 if opts["print_url"] and ui.verbose:
4937 raise error.Abort(_("cannot use --print-url with --verbose"))
4782
4938
4783 if opts["stdio"]:
4939 if opts["stdio"]:
4784 if repo is None:
4940 if repo is None:
@@ -4790,6 +4946,8 b' def serve(ui, repo, **opts):'
4790 service = server.createservice(ui, repo, opts)
4946 service = server.createservice(ui, repo, opts)
4791 return server.runservice(opts, initfn=service.init, runfn=service.run)
4947 return server.runservice(opts, initfn=service.init, runfn=service.run)
4792
4948
4949 _NOTTERSE = 'nothing'
4950
4793 @command('^status|st',
4951 @command('^status|st',
4794 [('A', 'all', None, _('show status of all files')),
4952 [('A', 'all', None, _('show status of all files')),
4795 ('m', 'modified', None, _('show only modified files')),
4953 ('m', 'modified', None, _('show only modified files')),
@@ -4800,7 +4958,7 b' def serve(ui, repo, **opts):'
4800 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4958 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4801 ('i', 'ignored', None, _('show only ignored files')),
4959 ('i', 'ignored', None, _('show only ignored files')),
4802 ('n', 'no-status', None, _('hide status prefix')),
4960 ('n', 'no-status', None, _('hide status prefix')),
4803 ('t', 'terse', '', _('show the terse output (EXPERIMENTAL)')),
4961 ('t', 'terse', _NOTTERSE, _('show the terse output (EXPERIMENTAL)')),
4804 ('C', 'copies', None, _('show source of copied files')),
4962 ('C', 'copies', None, _('show source of copied files')),
4805 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4963 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4806 ('', 'rev', [], _('show difference from revision'), _('REV')),
4964 ('', 'rev', [], _('show difference from revision'), _('REV')),
@@ -4898,6 +5056,11 b' def status(ui, repo, *pats, **opts):'
4898 revs = opts.get('rev')
5056 revs = opts.get('rev')
4899 change = opts.get('change')
5057 change = opts.get('change')
4900 terse = opts.get('terse')
5058 terse = opts.get('terse')
5059 if terse is _NOTTERSE:
5060 if revs:
5061 terse = ''
5062 else:
5063 terse = ui.config('commands', 'status.terse')
4901
5064
4902 if revs and change:
5065 if revs and change:
4903 msg = _('cannot specify --rev and --change at the same time')
5066 msg = _('cannot specify --rev and --change at the same time')
@@ -4939,7 +5102,8 b' def status(ui, repo, *pats, **opts):'
4939 # we need to compute clean and unknown to terse
5102 # we need to compute clean and unknown to terse
4940 stat = repo.status(ctx1.node(), ctx2.node(), m,
5103 stat = repo.status(ctx1.node(), ctx2.node(), m,
4941 'ignored' in show or 'i' in terse,
5104 'ignored' in show or 'i' in terse,
4942 True, True, opts.get('subrepos'))
5105 clean=True, unknown=True,
5106 listsubrepos=opts.get('subrepos'))
4943
5107
4944 stat = cmdutil.tersedir(stat, terse)
5108 stat = cmdutil.tersedir(stat, terse)
4945 else:
5109 else:
@@ -4963,6 +5127,7 b' def status(ui, repo, *pats, **opts):'
4963 label = 'status.' + state
5127 label = 'status.' + state
4964 for f in files:
5128 for f in files:
4965 fm.startitem()
5129 fm.startitem()
5130 fm.context(ctx=ctx2)
4966 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5131 fm.condwrite(showchar, 'status', '%s ', char, label=label)
4967 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5132 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
4968 if f in copy:
5133 if f in copy:
@@ -5301,10 +5466,7 b' def tag(ui, repo, name1, *names, **opts)'
5301 Returns 0 on success.
5466 Returns 0 on success.
5302 """
5467 """
5303 opts = pycompat.byteskwargs(opts)
5468 opts = pycompat.byteskwargs(opts)
5304 wlock = lock = None
5469 with repo.wlock(), repo.lock():
5305 try:
5306 wlock = repo.wlock()
5307 lock = repo.lock()
5308 rev_ = "."
5470 rev_ = "."
5309 names = [t.strip() for t in (name1,) + names]
5471 names = [t.strip() for t in (name1,) + names]
5310 if len(names) != len(set(names)):
5472 if len(names) != len(set(names)):
@@ -5375,8 +5537,6 b' def tag(ui, repo, name1, *names, **opts)'
5375
5537
5376 tagsmod.tag(repo, names, node, message, opts.get('local'),
5538 tagsmod.tag(repo, names, node, message, opts.get('local'),
5377 opts.get('user'), date, editor=editor)
5539 opts.get('user'), date, editor=editor)
5378 finally:
5379 release(lock, wlock)
5380
5540
5381 @command('tags', formatteropts, '', intents={INTENT_READONLY})
5541 @command('tags', formatteropts, '', intents={INTENT_READONLY})
5382 def tags(ui, repo, **opts):
5542 def tags(ui, repo, **opts):
@@ -5392,6 +5552,7 b' def tags(ui, repo, **opts):'
5392 opts = pycompat.byteskwargs(opts)
5552 opts = pycompat.byteskwargs(opts)
5393 ui.pager('tags')
5553 ui.pager('tags')
5394 fm = ui.formatter('tags', opts)
5554 fm = ui.formatter('tags', opts)
5555 contexthint = fm.contexthint('tag rev node type')
5395 hexfunc = fm.hexfunc
5556 hexfunc = fm.hexfunc
5396 tagtype = ""
5557 tagtype = ""
5397
5558
@@ -5404,6 +5565,8 b' def tags(ui, repo, **opts):'
5404 tagtype = 'local'
5565 tagtype = 'local'
5405
5566
5406 fm.startitem()
5567 fm.startitem()
5568 if 'ctx' in contexthint:
5569 fm.context(ctx=repo[n])
5407 fm.write('tag', '%s', t, label=label)
5570 fm.write('tag', '%s', t, label=label)
5408 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5571 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5409 fm.condwrite(not ui.quiet, 'rev node', fmt,
5572 fm.condwrite(not ui.quiet, 'rev node', fmt,
@@ -5583,18 +5746,19 b' def update(ui, repo, node=None, **opts):'
5583 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
5746 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
5584 ctx = scmutil.revsingle(repo, rev, rev)
5747 ctx = scmutil.revsingle(repo, rev, rev)
5585 rev = ctx.rev()
5748 rev = ctx.rev()
5586 if ctx.hidden():
5749 hidden = ctx.hidden()
5750 overrides = {('ui', 'forcemerge'): opts.get(r'tool', '')}
5751 with ui.configoverride(overrides, 'update'):
5752 ret = hg.updatetotally(ui, repo, rev, brev, clean=clean,
5753 updatecheck=updatecheck)
5754 if hidden:
5587 ctxstr = ctx.hex()[:12]
5755 ctxstr = ctx.hex()[:12]
5588 ui.warn(_("updating to a hidden changeset %s\n") % ctxstr)
5756 ui.warn(_("updated to hidden changeset %s\n") % ctxstr)
5589
5757
5590 if ctx.obsolete():
5758 if ctx.obsolete():
5591 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
5759 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
5592 ui.warn("(%s)\n" % obsfatemsg)
5760 ui.warn("(%s)\n" % obsfatemsg)
5593
5761 return ret
5594 repo.ui.setconfig('ui', 'forcemerge', opts.get(r'tool'), 'update')
5595
5596 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5597 updatecheck=updatecheck)
5598
5762
5599 @command('verify', [])
5763 @command('verify', [])
5600 def verify(ui, repo):
5764 def verify(ui, repo):
@@ -256,7 +256,7 b' class server(object):'
256 self.cout, self.cerr)
256 self.cout, self.cerr)
257
257
258 try:
258 try:
259 ret = (dispatch.dispatch(req) or 0) & 255 # might return None
259 ret = dispatch.dispatch(req) & 255
260 self.cresult.write(struct.pack('>i', int(ret)))
260 self.cresult.write(struct.pack('>i', int(ret)))
261 finally:
261 finally:
262 # restore old cwd
262 # restore old cwd
@@ -494,6 +494,8 b' class unixforkingservice(object):'
494 conn.close() # release handle in parent process
494 conn.close() # release handle in parent process
495 else:
495 else:
496 try:
496 try:
497 selector.close()
498 self._sock.close()
497 self._runworker(conn)
499 self._runworker(conn)
498 conn.close()
500 conn.close()
499 os._exit(0)
501 os._exit(0)
@@ -215,7 +215,7 b' def parselist(value):'
215 parts.append('')
215 parts.append('')
216 if s[offset:offset + 1] == '"' and not parts[-1]:
216 if s[offset:offset + 1] == '"' and not parts[-1]:
217 return _parse_quote, parts, offset + 1
217 return _parse_quote, parts, offset + 1
218 elif s[offset:offset + 1] == '"' and parts[-1][-1] == '\\':
218 elif s[offset:offset + 1] == '"' and parts[-1][-1:] == '\\':
219 parts[-1] = parts[-1][:-1] + s[offset:offset + 1]
219 parts[-1] = parts[-1][:-1] + s[offset:offset + 1]
220 return _parse_plain, parts, offset + 1
220 return _parse_plain, parts, offset + 1
221 parts[-1] += s[offset:offset + 1]
221 parts[-1] += s[offset:offset + 1]
@@ -147,6 +147,9 b" coreconfigitem('annotate', 'nobinary',"
147 coreconfigitem('annotate', 'noprefix',
147 coreconfigitem('annotate', 'noprefix',
148 default=False,
148 default=False,
149 )
149 )
150 coreconfigitem('annotate', 'word-diff',
151 default=False,
152 )
150 coreconfigitem('auth', 'cookiefile',
153 coreconfigitem('auth', 'cookiefile',
151 default=None,
154 default=None,
152 )
155 )
@@ -184,6 +187,9 b" coreconfigitem('color', 'mode',"
184 coreconfigitem('color', 'pagermode',
187 coreconfigitem('color', 'pagermode',
185 default=dynamicdefault,
188 default=dynamicdefault,
186 )
189 )
190 coreconfigitem('commands', 'grep.all-files',
191 default=False,
192 )
187 coreconfigitem('commands', 'show.aliasprefix',
193 coreconfigitem('commands', 'show.aliasprefix',
188 default=list,
194 default=list,
189 )
195 )
@@ -193,13 +199,14 b" coreconfigitem('commands', 'status.relat"
193 coreconfigitem('commands', 'status.skipstates',
199 coreconfigitem('commands', 'status.skipstates',
194 default=[],
200 default=[],
195 )
201 )
202 coreconfigitem('commands', 'status.terse',
203 default='',
204 )
196 coreconfigitem('commands', 'status.verbose',
205 coreconfigitem('commands', 'status.verbose',
197 default=False,
206 default=False,
198 )
207 )
199 coreconfigitem('commands', 'update.check',
208 coreconfigitem('commands', 'update.check',
200 default=None,
209 default=None,
201 # Deprecated, remove after 4.4 release
202 alias=[('experimental', 'updatecheck')]
203 )
210 )
204 coreconfigitem('commands', 'update.requiredest',
211 coreconfigitem('commands', 'update.requiredest',
205 default=False,
212 default=False,
@@ -208,6 +215,9 b" coreconfigitem('committemplate', '.*',"
208 default=None,
215 default=None,
209 generic=True,
216 generic=True,
210 )
217 )
218 coreconfigitem('convert', 'bzr.saverev',
219 default=True,
220 )
211 coreconfigitem('convert', 'cvsps.cache',
221 coreconfigitem('convert', 'cvsps.cache',
212 default=True,
222 default=True,
213 )
223 )
@@ -362,6 +372,9 b" coreconfigitem('devel', 'user.obsmarker'"
362 coreconfigitem('devel', 'warn-config-unknown',
372 coreconfigitem('devel', 'warn-config-unknown',
363 default=None,
373 default=None,
364 )
374 )
375 coreconfigitem('devel', 'debug.extensions',
376 default=False,
377 )
365 coreconfigitem('devel', 'debug.peer-request',
378 coreconfigitem('devel', 'debug.peer-request',
366 default=False,
379 default=False,
367 )
380 )
@@ -395,6 +408,9 b" coreconfigitem('diff', 'nobinary',"
395 coreconfigitem('diff', 'noprefix',
408 coreconfigitem('diff', 'noprefix',
396 default=False,
409 default=False,
397 )
410 )
411 coreconfigitem('diff', 'word-diff',
412 default=False,
413 )
398 coreconfigitem('email', 'bcc',
414 coreconfigitem('email', 'bcc',
399 default=None,
415 default=None,
400 )
416 )
@@ -508,9 +524,6 b" coreconfigitem('experimental', 'evolutio"
508 coreconfigitem('experimental', 'evolution.track-operation',
524 coreconfigitem('experimental', 'evolution.track-operation',
509 default=True,
525 default=True,
510 )
526 )
511 coreconfigitem('experimental', 'worddiff',
512 default=False,
513 )
514 coreconfigitem('experimental', 'maxdeltachainspan',
527 coreconfigitem('experimental', 'maxdeltachainspan',
515 default=-1,
528 default=-1,
516 )
529 )
@@ -559,12 +572,18 b" coreconfigitem('experimental', 'httppost"
559 coreconfigitem('experimental', 'mergedriver',
572 coreconfigitem('experimental', 'mergedriver',
560 default=None,
573 default=None,
561 )
574 )
575 coreconfigitem('experimental', 'nointerrupt', default=False)
576 coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
577
562 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
578 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
563 default=False,
579 default=False,
564 )
580 )
565 coreconfigitem('experimental', 'remotenames',
581 coreconfigitem('experimental', 'remotenames',
566 default=False,
582 default=False,
567 )
583 )
584 coreconfigitem('experimental', 'removeemptydirs',
585 default=True,
586 )
568 coreconfigitem('experimental', 'revlogv2',
587 coreconfigitem('experimental', 'revlogv2',
569 default=None,
588 default=None,
570 )
589 )
@@ -581,10 +600,10 b" coreconfigitem('experimental', 'sparse-r"
581 default=False,
600 default=False,
582 )
601 )
583 coreconfigitem('experimental', 'sparse-read.density-threshold',
602 coreconfigitem('experimental', 'sparse-read.density-threshold',
584 default=0.25,
603 default=0.50,
585 )
604 )
586 coreconfigitem('experimental', 'sparse-read.min-gap-size',
605 coreconfigitem('experimental', 'sparse-read.min-gap-size',
587 default='256K',
606 default='65K',
588 )
607 )
589 coreconfigitem('experimental', 'treemanifest',
608 coreconfigitem('experimental', 'treemanifest',
590 default=False,
609 default=False,
@@ -604,6 +623,9 b" coreconfigitem('experimental', 'web.api."
604 coreconfigitem('experimental', 'web.api.debugreflect',
623 coreconfigitem('experimental', 'web.api.debugreflect',
605 default=False,
624 default=False,
606 )
625 )
626 coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
627 default=False,
628 )
607 coreconfigitem('experimental', 'xdiff',
629 coreconfigitem('experimental', 'xdiff',
608 default=False,
630 default=False,
609 )
631 )
@@ -615,9 +637,6 b" coreconfigitem('extdata', '.*',"
615 default=None,
637 default=None,
616 generic=True,
638 generic=True,
617 )
639 )
618 coreconfigitem('format', 'aggressivemergedeltas',
619 default=False,
620 )
621 coreconfigitem('format', 'chunkcachesize',
640 coreconfigitem('format', 'chunkcachesize',
622 default=None,
641 default=None,
623 )
642 )
@@ -636,6 +655,9 b" coreconfigitem('format', 'maxchainlen',"
636 coreconfigitem('format', 'obsstore-version',
655 coreconfigitem('format', 'obsstore-version',
637 default=None,
656 default=None,
638 )
657 )
658 coreconfigitem('format', 'sparse-revlog',
659 default=False,
660 )
639 coreconfigitem('format', 'usefncache',
661 coreconfigitem('format', 'usefncache',
640 default=True,
662 default=True,
641 )
663 )
@@ -866,6 +888,9 b" coreconfigitem('profiling', 'sort',"
866 coreconfigitem('profiling', 'statformat',
888 coreconfigitem('profiling', 'statformat',
867 default='hotpath',
889 default='hotpath',
868 )
890 )
891 coreconfigitem('profiling', 'time-track',
892 default='cpu',
893 )
869 coreconfigitem('profiling', 'type',
894 coreconfigitem('profiling', 'type',
870 default='stat',
895 default='stat',
871 )
896 )
@@ -902,6 +927,10 b" coreconfigitem('progress', 'width',"
902 coreconfigitem('push', 'pushvars.server',
927 coreconfigitem('push', 'pushvars.server',
903 default=False,
928 default=False,
904 )
929 )
930 coreconfigitem('revlog', 'optimize-delta-parent-choice',
931 default=True,
932 # formely an experimental option: format.aggressivemergedeltas
933 )
905 coreconfigitem('server', 'bookmarks-pushkey-compat',
934 coreconfigitem('server', 'bookmarks-pushkey-compat',
906 default=True,
935 default=True,
907 )
936 )
@@ -932,16 +961,16 b" coreconfigitem('server', 'concurrent-pus"
932 coreconfigitem('server', 'disablefullbundle',
961 coreconfigitem('server', 'disablefullbundle',
933 default=False,
962 default=False,
934 )
963 )
935 coreconfigitem('server', 'streamunbundle',
964 coreconfigitem('server', 'maxhttpheaderlen',
936 default=False,
965 default=1024,
937 )
966 )
938 coreconfigitem('server', 'pullbundle',
967 coreconfigitem('server', 'pullbundle',
939 default=False,
968 default=False,
940 )
969 )
941 coreconfigitem('server', 'maxhttpheaderlen',
970 coreconfigitem('server', 'preferuncompressed',
942 default=1024,
971 default=False,
943 )
972 )
944 coreconfigitem('server', 'preferuncompressed',
973 coreconfigitem('server', 'streamunbundle',
945 default=False,
974 default=False,
946 )
975 )
947 coreconfigitem('server', 'uncompressed',
976 coreconfigitem('server', 'uncompressed',
@@ -1065,6 +1094,9 b" coreconfigitem('ui', 'formatted',"
1065 coreconfigitem('ui', 'graphnodetemplate',
1094 coreconfigitem('ui', 'graphnodetemplate',
1066 default=None,
1095 default=None,
1067 )
1096 )
1097 coreconfigitem('ui', 'history-editing-backup',
1098 default=True,
1099 )
1068 coreconfigitem('ui', 'interactive',
1100 coreconfigitem('ui', 'interactive',
1069 default=None,
1101 default=None,
1070 )
1102 )
@@ -1074,6 +1106,9 b" coreconfigitem('ui', 'interface',"
1074 coreconfigitem('ui', 'interface.chunkselector',
1106 coreconfigitem('ui', 'interface.chunkselector',
1075 default=None,
1107 default=None,
1076 )
1108 )
1109 coreconfigitem('ui', 'large-file-limit',
1110 default=10000000,
1111 )
1077 coreconfigitem('ui', 'logblockedtimes',
1112 coreconfigitem('ui', 'logblockedtimes',
1078 default=False,
1113 default=False,
1079 )
1114 )
@@ -1225,7 +1260,8 b" coreconfigitem('web', 'accesslog',"
1225 coreconfigitem('web', 'address',
1260 coreconfigitem('web', 'address',
1226 default='',
1261 default='',
1227 )
1262 )
1228 coreconfigitem('web', 'allow_archive',
1263 coreconfigitem('web', 'allow-archive',
1264 alias=[('web', 'allow_archive')],
1229 default=list,
1265 default=list,
1230 )
1266 )
1231 coreconfigitem('web', 'allow_read',
1267 coreconfigitem('web', 'allow_read',
@@ -10,7 +10,6 b' from __future__ import absolute_import'
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import re
14 import stat
13 import stat
15
14
16 from .i18n import _
15 from .i18n import _
@@ -24,7 +23,6 b' from .node import ('
24 short,
23 short,
25 wdirfilenodeids,
24 wdirfilenodeids,
26 wdirid,
25 wdirid,
27 wdirrev,
28 )
26 )
29 from . import (
27 from . import (
30 dagop,
28 dagop,
@@ -52,8 +50,6 b' from .utils import ('
52
50
53 propertycache = util.propertycache
51 propertycache = util.propertycache
54
52
55 nonascii = re.compile(br'[^\x21-\x7f]').search
56
57 class basectx(object):
53 class basectx(object):
58 """A basectx object represents the common logic for its children:
54 """A basectx object represents the common logic for its children:
59 changectx: read-only context that is already present in the repo,
55 changectx: read-only context that is already present in the repo,
@@ -185,8 +181,8 b' class basectx(object):'
185 def mutable(self):
181 def mutable(self):
186 return self.phase() > phases.public
182 return self.phase() > phases.public
187
183
188 def getfileset(self, expr):
184 def matchfileset(self, expr, badfn=None):
189 return fileset.getfileset(self, expr)
185 return fileset.match(self, expr, badfn=badfn)
190
186
191 def obsolete(self):
187 def obsolete(self):
192 """True if the changeset is obsolete"""
188 """True if the changeset is obsolete"""
@@ -298,14 +294,18 b' class basectx(object):'
298 auditor=r.nofsauditor, ctx=self,
294 auditor=r.nofsauditor, ctx=self,
299 listsubrepos=listsubrepos, badfn=badfn)
295 listsubrepos=listsubrepos, badfn=badfn)
300
296
301 def diff(self, ctx2=None, match=None, **opts):
297 def diff(self, ctx2=None, match=None, changes=None, opts=None,
298 losedatafn=None, prefix='', relroot='', copy=None,
299 hunksfilterfn=None):
302 """Returns a diff generator for the given contexts and matcher"""
300 """Returns a diff generator for the given contexts and matcher"""
303 if ctx2 is None:
301 if ctx2 is None:
304 ctx2 = self.p1()
302 ctx2 = self.p1()
305 if ctx2 is not None:
303 if ctx2 is not None:
306 ctx2 = self._repo[ctx2]
304 ctx2 = self._repo[ctx2]
307 diffopts = patch.diffopts(self._repo.ui, pycompat.byteskwargs(opts))
305 return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
308 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
306 opts=opts, losedatafn=losedatafn, prefix=prefix,
307 relroot=relroot, copy=copy,
308 hunksfilterfn=hunksfilterfn)
309
309
310 def dirs(self):
310 def dirs(self):
311 return self._manifest.dirs()
311 return self._manifest.dirs()
@@ -377,31 +377,6 b' class basectx(object):'
377
377
378 return r
378 return r
379
379
380 def changectxdeprecwarn(repo):
381 # changectx's constructor will soon lose support for these forms of
382 # changeids:
383 # * stringinfied ints
384 # * bookmarks, tags, branches, and other namespace identifiers
385 # * hex nodeid prefixes
386 #
387 # Depending on your use case, replace repo[x] by one of these:
388 # * If you want to support general revsets, use scmutil.revsingle(x)
389 # * If you know that "x" is a stringified int, use repo[int(x)]
390 # * If you know that "x" is a bookmark, use repo._bookmarks.changectx(x)
391 # * If you know that "x" is a tag, use repo[repo.tags()[x]]
392 # * If you know that "x" is a branch or in some other namespace,
393 # use the appropriate mechanism for that namespace
394 # * If you know that "x" is a hex nodeid prefix, use
395 # repo[scmutil.resolvehexnodeidprefix(repo, x)]
396 # * If "x" is a string that can be any of the above, but you don't want
397 # to allow general revsets (perhaps because "x" may come from a remote
398 # user and the revset may be too costly), use scmutil.revsymbol(repo, x)
399 # * If "x" can be a mix of the above, you'll have to figure it out
400 # yourself
401 repo.ui.deprecwarn("changectx.__init__ is getting more limited, see "
402 "context.changectxdeprecwarn() for details", "4.6",
403 stacklevel=4)
404
405 class changectx(basectx):
380 class changectx(basectx):
406 """A changecontext object makes access to data related to a particular
381 """A changecontext object makes access to data related to a particular
407 changeset convenient. It represents a read-only context already present in
382 changeset convenient. It represents a read-only context already present in
@@ -415,22 +390,22 b' class changectx(basectx):'
415 self._node = repo.changelog.node(changeid)
390 self._node = repo.changelog.node(changeid)
416 self._rev = changeid
391 self._rev = changeid
417 return
392 return
418 if changeid == 'null':
393 elif changeid == 'null':
419 self._node = nullid
394 self._node = nullid
420 self._rev = nullrev
395 self._rev = nullrev
421 return
396 return
422 if changeid == 'tip':
397 elif changeid == 'tip':
423 self._node = repo.changelog.tip()
398 self._node = repo.changelog.tip()
424 self._rev = repo.changelog.rev(self._node)
399 self._rev = repo.changelog.rev(self._node)
425 return
400 return
426 if (changeid == '.'
401 elif (changeid == '.'
427 or repo.local() and changeid == repo.dirstate.p1()):
402 or repo.local() and changeid == repo.dirstate.p1()):
428 # this is a hack to delay/avoid loading obsmarkers
403 # this is a hack to delay/avoid loading obsmarkers
429 # when we know that '.' won't be hidden
404 # when we know that '.' won't be hidden
430 self._node = repo.dirstate.p1()
405 self._node = repo.dirstate.p1()
431 self._rev = repo.unfiltered().changelog.rev(self._node)
406 self._rev = repo.unfiltered().changelog.rev(self._node)
432 return
407 return
433 if len(changeid) == 20:
408 elif len(changeid) == 20:
434 try:
409 try:
435 self._node = changeid
410 self._node = changeid
436 self._rev = repo.changelog.rev(changeid)
411 self._rev = repo.changelog.rev(changeid)
@@ -438,27 +413,17 b' class changectx(basectx):'
438 except error.FilteredLookupError:
413 except error.FilteredLookupError:
439 raise
414 raise
440 except LookupError:
415 except LookupError:
441 pass
416 # check if it might have come from damaged dirstate
417 #
418 # XXX we could avoid the unfiltered if we had a recognizable
419 # exception for filtered changeset access
420 if (repo.local()
421 and changeid in repo.unfiltered().dirstate.parents()):
422 msg = _("working directory has unknown parent '%s'!")
423 raise error.Abort(msg % short(changeid))
424 changeid = hex(changeid) # for the error message
442
425
443 try:
426 elif len(changeid) == 40:
444 r = int(changeid)
445 if '%d' % r != changeid:
446 raise ValueError
447 l = len(repo.changelog)
448 if r < 0:
449 r += l
450 if r < 0 or r >= l and r != wdirrev:
451 raise ValueError
452 self._rev = r
453 self._node = repo.changelog.node(r)
454 changectxdeprecwarn(repo)
455 return
456 except error.FilteredIndexError:
457 raise
458 except (ValueError, OverflowError, IndexError):
459 pass
460
461 if len(changeid) == 40:
462 try:
427 try:
463 self._node = bin(changeid)
428 self._node = bin(changeid)
464 self._rev = repo.changelog.rev(self._node)
429 self._rev = repo.changelog.rev(self._node)
@@ -467,39 +432,15 b' class changectx(basectx):'
467 raise
432 raise
468 except (TypeError, LookupError):
433 except (TypeError, LookupError):
469 pass
434 pass
470
435 else:
471 # lookup bookmarks through the name interface
436 raise error.ProgrammingError(
472 try:
437 "unsupported changeid '%s' of type %s" %
473 self._node = repo.names.singlenode(repo, changeid)
438 (changeid, type(changeid)))
474 self._rev = repo.changelog.rev(self._node)
475 changectxdeprecwarn(repo)
476 return
477 except KeyError:
478 pass
479
480 self._node = scmutil.resolvehexnodeidprefix(repo, changeid)
481 if self._node is not None:
482 self._rev = repo.changelog.rev(self._node)
483 changectxdeprecwarn(repo)
484 return
485
439
486 # lookup failed
440 # lookup failed
487 # check if it might have come from damaged dirstate
488 #
489 # XXX we could avoid the unfiltered if we had a recognizable
490 # exception for filtered changeset access
491 if (repo.local()
492 and changeid in repo.unfiltered().dirstate.parents()):
493 msg = _("working directory has unknown parent '%s'!")
494 raise error.Abort(msg % short(changeid))
495 try:
496 if len(changeid) == 20 and nonascii(changeid):
497 changeid = hex(changeid)
498 except TypeError:
499 pass
500 except (error.FilteredIndexError, error.FilteredLookupError):
441 except (error.FilteredIndexError, error.FilteredLookupError):
501 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
442 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
502 % changeid)
443 % pycompat.bytestr(changeid))
503 except error.FilteredRepoLookupError:
444 except error.FilteredRepoLookupError:
504 raise
445 raise
505 except IndexError:
446 except IndexError:
@@ -649,8 +590,14 b' class changectx(basectx):'
649 return changectx(self._repo, anc)
590 return changectx(self._repo, anc)
650
591
651 def descendant(self, other):
592 def descendant(self, other):
652 """True if other is descendant of this changeset"""
593 msg = (b'ctx.descendant(other) is deprecated, '
653 return self._repo.changelog.descendant(self._rev, other._rev)
594 'use ctx.isancestorof(other)')
595 self._repo.ui.deprecwarn(msg, b'4.7')
596 return self.isancestorof(other)
597
598 def isancestorof(self, other):
599 """True if this changeset is an ancestor of other"""
600 return self._repo.changelog.isancestorrev(self._rev, other._rev)
654
601
655 def walk(self, match):
602 def walk(self, match):
656 '''Generates matching file names.'''
603 '''Generates matching file names.'''
@@ -1294,7 +1241,8 b' class committablectx(basectx):'
1294 unknown=True, ignored=False))
1241 unknown=True, ignored=False))
1295
1242
1296 def matches(self, match):
1243 def matches(self, match):
1297 return sorted(self._repo.dirstate.matches(match))
1244 ds = self._repo.dirstate
1245 return sorted(f for f in ds.matches(match) if ds[f] != 'r')
1298
1246
1299 def ancestors(self):
1247 def ancestors(self):
1300 for p in self._parents:
1248 for p in self._parents:
@@ -1399,7 +1347,8 b' class workingctx(committablectx):'
1399 ui.warn(_("%s does not exist!\n") % uipath(f))
1347 ui.warn(_("%s does not exist!\n") % uipath(f))
1400 rejected.append(f)
1348 rejected.append(f)
1401 continue
1349 continue
1402 if st.st_size > 10000000:
1350 limit = ui.configbytes('ui', 'large-file-limit')
1351 if limit != 0 and st.st_size > limit:
1403 ui.warn(_("%s: up to %d MB of RAM may be required "
1352 ui.warn(_("%s: up to %d MB of RAM may be required "
1404 "to manage this file\n"
1353 "to manage this file\n"
1405 "(use 'hg revert %s' to cancel the "
1354 "(use 'hg revert %s' to cancel the "
@@ -1773,7 +1722,9 b' class workingfilectx(committablefilectx)'
1773
1722
1774 def remove(self, ignoremissing=False):
1723 def remove(self, ignoremissing=False):
1775 """wraps unlink for a repo's working directory"""
1724 """wraps unlink for a repo's working directory"""
1776 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1725 rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
1726 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing,
1727 rmdir=rmdir)
1777
1728
1778 def write(self, data, flags, backgroundclose=False, **kwargs):
1729 def write(self, data, flags, backgroundclose=False, **kwargs):
1779 """wraps repo.wwrite"""
1730 """wraps repo.wwrite"""
@@ -254,6 +254,11 b' def _computenonoverlap(repo, c1, c2, add'
254 repo.ui.debug("%s:\n %s\n" % (header % 'local', "\n ".join(u1)))
254 repo.ui.debug("%s:\n %s\n" % (header % 'local', "\n ".join(u1)))
255 if u2:
255 if u2:
256 repo.ui.debug("%s:\n %s\n" % (header % 'other', "\n ".join(u2)))
256 repo.ui.debug("%s:\n %s\n" % (header % 'other', "\n ".join(u2)))
257
258 narrowmatch = repo.narrowmatch()
259 if not narrowmatch.always():
260 u1 = [f for f in u1 if narrowmatch(f)]
261 u2 = [f for f in u2 if narrowmatch(f)]
257 return u1, u2
262 return u1, u2
258
263
259 def _makegetfctx(ctx):
264 def _makegetfctx(ctx):
@@ -411,14 +416,14 b' def _fullcopytracing(repo, c1, c2, base)'
411 # common ancestor or not without explicitly checking it, it's better to
416 # common ancestor or not without explicitly checking it, it's better to
412 # determine that here.
417 # determine that here.
413 #
418 #
414 # base.descendant(wc) and base.descendant(base) are False, work around that
419 # base.isancestorof(wc) is False, work around that
415 _c1 = c1.p1() if c1.rev() is None else c1
420 _c1 = c1.p1() if c1.rev() is None else c1
416 _c2 = c2.p1() if c2.rev() is None else c2
421 _c2 = c2.p1() if c2.rev() is None else c2
417 # an endpoint is "dirty" if it isn't a descendant of the merge base
422 # an endpoint is "dirty" if it isn't a descendant of the merge base
418 # if we have a dirty endpoint, we need to trigger graft logic, and also
423 # if we have a dirty endpoint, we need to trigger graft logic, and also
419 # keep track of which endpoint is dirty
424 # keep track of which endpoint is dirty
420 dirtyc1 = not (base == _c1 or base.descendant(_c1))
425 dirtyc1 = not base.isancestorof(_c1)
421 dirtyc2 = not (base == _c2 or base.descendant(_c2))
426 dirtyc2 = not base.isancestorof(_c2)
422 graft = dirtyc1 or dirtyc2
427 graft = dirtyc1 or dirtyc2
423 tca = base
428 tca = base
424 if graft:
429 if graft:
@@ -65,6 +65,11 b' except ImportError:'
65 # compiled with curses
65 # compiled with curses
66 curses = False
66 curses = False
67
67
68 class fallbackerror(error.Abort):
69 """Error that indicates the client should try to fallback to text mode."""
70 # Inherits from error.Abort so that existing behavior is preserved if the
71 # calling code does not know how to fallback.
72
68 def checkcurses(ui):
73 def checkcurses(ui):
69 """Return True if the user wants to use curses
74 """Return True if the user wants to use curses
70
75
@@ -529,8 +534,8 b' def chunkselector(ui, headerlist, operat'
529 origsigtstp = signal.getsignal(signal.SIGTSTP)
534 origsigtstp = signal.getsignal(signal.SIGTSTP)
530 try:
535 try:
531 curses.wrapper(chunkselector.main)
536 curses.wrapper(chunkselector.main)
532 if chunkselector.initerr is not None:
537 if chunkselector.initexc is not None:
533 raise error.Abort(chunkselector.initerr)
538 raise chunkselector.initexc
534 # ncurses does not restore signal handler for SIGTSTP
539 # ncurses does not restore signal handler for SIGTSTP
535 finally:
540 finally:
536 if origsigtstp is not sentinel:
541 if origsigtstp is not sentinel:
@@ -549,7 +554,7 b' def testchunkselector(testfn, ui, header'
549 """
554 """
550 chunkselector = curseschunkselector(headerlist, ui, operation)
555 chunkselector = curseschunkselector(headerlist, ui, operation)
551 if testfn and os.path.exists(testfn):
556 if testfn and os.path.exists(testfn):
552 testf = open(testfn)
557 testf = open(testfn, 'rb')
553 testcommands = [x.rstrip('\n') for x in testf.readlines()]
558 testcommands = [x.rstrip('\n') for x in testf.readlines()]
554 testf.close()
559 testf.close()
555 while True:
560 while True:
@@ -666,6 +671,7 b' class curseschunkselector(object):'
666 nextitem = currentitem
671 nextitem = currentitem
667
672
668 self.currentselecteditem = nextitem
673 self.currentselecteditem = nextitem
674 self.recenterdisplayedarea()
669
675
670 def downarrowevent(self):
676 def downarrowevent(self):
671 """
677 """
@@ -705,6 +711,7 b' class curseschunkselector(object):'
705 nextitem = currentitem
711 nextitem = currentitem
706
712
707 self.currentselecteditem = nextitem
713 self.currentselecteditem = nextitem
714 self.recenterdisplayedarea()
708
715
709 def rightarrowevent(self):
716 def rightarrowevent(self):
710 """
717 """
@@ -1718,7 +1725,7 b' are you sure you want to review/edit and'
1718 self.stdscr = stdscr
1725 self.stdscr = stdscr
1719 # error during initialization, cannot be printed in the curses
1726 # error during initialization, cannot be printed in the curses
1720 # interface, it should be printed by the calling code
1727 # interface, it should be printed by the calling code
1721 self.initerr = None
1728 self.initexc = None
1722 self.yscreensize, self.xscreensize = self.stdscr.getmaxyx()
1729 self.yscreensize, self.xscreensize = self.stdscr.getmaxyx()
1723
1730
1724 curses.start_color()
1731 curses.start_color()
@@ -1751,7 +1758,8 b' are you sure you want to review/edit and'
1751 try:
1758 try:
1752 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
1759 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
1753 except curses.error:
1760 except curses.error:
1754 self.initerr = _('this diff is too large to be displayed')
1761 self.initexc = fallbackerror(
1762 _('this diff is too large to be displayed'))
1755 return
1763 return
1756 # initialize selecteditemendline (initial start-line is 0)
1764 # initialize selecteditemendline (initial start-line is 0)
1757 self.selecteditemendline = self.getnumlinesdisplayed(
1765 self.selecteditemendline = self.getnumlinesdisplayed(
@@ -21,7 +21,6 b' import stat'
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import tempfile
25 import time
24 import time
26
25
27 from .i18n import _
26 from .i18n import _
@@ -71,7 +70,6 b' from . import ('
71 scmutil,
70 scmutil,
72 setdiscovery,
71 setdiscovery,
73 simplemerge,
72 simplemerge,
74 smartset,
75 sshpeer,
73 sshpeer,
76 sslutil,
74 sslutil,
77 streamclone,
75 streamclone,
@@ -183,18 +181,14 b' def debugbuilddag(ui, repo, text=None,'
183 initialmergedlines.append("")
181 initialmergedlines.append("")
184
182
185 tags = []
183 tags = []
186
184 progress = ui.makeprogress(_('building'), unit=_('revisions'),
187 wlock = lock = tr = None
185 total=total)
188 try:
186 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
189 wlock = repo.wlock()
190 lock = repo.lock()
191 tr = repo.transaction("builddag")
192
193 at = -1
187 at = -1
194 atbranch = 'default'
188 atbranch = 'default'
195 nodeids = []
189 nodeids = []
196 id = 0
190 id = 0
197 ui.progress(_('building'), id, unit=_('revisions'), total=total)
191 progress.update(id)
198 for type, data in dagparser.parsedag(text):
192 for type, data in dagparser.parsedag(text):
199 if type == 'n':
193 if type == 'n':
200 ui.note(('node %s\n' % pycompat.bytestr(data)))
194 ui.note(('node %s\n' % pycompat.bytestr(data)))
@@ -267,14 +261,10 b' def debugbuilddag(ui, repo, text=None,'
267 elif type == 'a':
261 elif type == 'a':
268 ui.note(('branch %s\n' % data))
262 ui.note(('branch %s\n' % data))
269 atbranch = data
263 atbranch = data
270 ui.progress(_('building'), id, unit=_('revisions'), total=total)
264 progress.update(id)
271 tr.close()
272
265
273 if tags:
266 if tags:
274 repo.vfs.write("localtags", "".join(tags))
267 repo.vfs.write("localtags", "".join(tags))
275 finally:
276 ui.progress(_('building'), None)
277 release(tr, lock, wlock)
278
268
279 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
280 indent_string = ' ' * indent
270 indent_string = ' ' * indent
@@ -437,7 +427,7 b' def debugcheckstate(ui, repo):'
437 'hg debugcolor')
427 'hg debugcolor')
438 def debugcolor(ui, repo, **opts):
428 def debugcolor(ui, repo, **opts):
439 """show available color, effects or style"""
429 """show available color, effects or style"""
440 ui.write(('color mode: %s\n') % ui._colormode)
430 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
441 if opts.get(r'style'):
431 if opts.get(r'style'):
442 return _debugdisplaystyle(ui)
432 return _debugdisplaystyle(ui)
443 else:
433 else:
@@ -630,6 +620,8 b' def debugdeltachain(ui, repo, file_=None'
630 opts = pycompat.byteskwargs(opts)
620 opts = pycompat.byteskwargs(opts)
631 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
621 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
632 index = r.index
622 index = r.index
623 start = r.start
624 length = r.length
633 generaldelta = r.version & revlog.FLAG_GENERALDELTA
625 generaldelta = r.version & revlog.FLAG_GENERALDELTA
634 withsparseread = getattr(r, '_withsparseread', False)
626 withsparseread = getattr(r, '_withsparseread', False)
635
627
@@ -677,8 +669,6 b' def debugdeltachain(ui, repo, file_=None'
677 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
669 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
678 chainbase = chain[0]
670 chainbase = chain[0]
679 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
671 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
680 start = r.start
681 length = r.length
682 basestart = start(chainbase)
672 basestart = start(chainbase)
683 revstart = start(rev)
673 revstart = start(rev)
684 lineardist = revstart + comp - basestart
674 lineardist = revstart + comp - basestart
@@ -688,8 +678,15 b' def debugdeltachain(ui, repo, file_=None'
688 except IndexError:
678 except IndexError:
689 prevrev = -1
679 prevrev = -1
690
680
691 chainratio = float(chainsize) / float(uncomp)
681 if uncomp != 0:
692 extraratio = float(extradist) / float(chainsize)
682 chainratio = float(chainsize) / float(uncomp)
683 else:
684 chainratio = chainsize
685
686 if chainsize != 0:
687 extraratio = float(extradist) / float(chainsize)
688 else:
689 extraratio = extradist
693
690
694 fm.startitem()
691 fm.startitem()
695 fm.write('rev chainid chainlen prevrev deltatype compsize '
692 fm.write('rev chainid chainlen prevrev deltatype compsize '
@@ -718,7 +715,10 b' def debugdeltachain(ui, repo, file_=None'
718 if largestblock < blksize:
715 if largestblock < blksize:
719 largestblock = blksize
716 largestblock = blksize
720
717
721 readdensity = float(chainsize) / float(readsize)
718 if readsize:
719 readdensity = float(chainsize) / float(readsize)
720 else:
721 readdensity = 1
722
722
723 fm.write('readsize largestblock readdensity srchunks',
723 fm.write('readsize largestblock readdensity srchunks',
724 ' %10d %10d %9.5f %8d',
724 ' %10d %10d %9.5f %8d',
@@ -838,8 +838,8 b' def debugdownload(ui, repo, url, output='
838 if output:
838 if output:
839 dest.close()
839 dest.close()
840
840
841 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
841 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
842 def debugextensions(ui, **opts):
842 def debugextensions(ui, repo, **opts):
843 '''show information about active extensions'''
843 '''show information about active extensions'''
844 opts = pycompat.byteskwargs(opts)
844 opts = pycompat.byteskwargs(opts)
845 exts = extensions.extensions(ui)
845 exts = extensions.extensions(ui)
@@ -885,16 +885,38 b' def debugextensions(ui, **opts):'
885 fm.end()
885 fm.end()
886
886
887 @command('debugfileset',
887 @command('debugfileset',
888 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
888 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
889 _('[-r REV] FILESPEC'))
889 ('', 'all-files', False,
890 _('test files from all revisions and working directory'))],
891 _('[-r REV] [--all-files] FILESPEC'))
890 def debugfileset(ui, repo, expr, **opts):
892 def debugfileset(ui, repo, expr, **opts):
891 '''parse and apply a fileset specification'''
893 '''parse and apply a fileset specification'''
892 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
894 opts = pycompat.byteskwargs(opts)
895 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
893 if ui.verbose:
896 if ui.verbose:
894 tree = fileset.parse(expr)
897 tree = fileset.parse(expr)
895 ui.note(fileset.prettyformat(tree), "\n")
898 ui.note(fileset.prettyformat(tree), "\n")
896
899
897 for f in ctx.getfileset(expr):
900 files = set()
901 if opts['all_files']:
902 for r in repo:
903 c = repo[r]
904 files.update(c.files())
905 files.update(c.substate)
906 if opts['all_files'] or ctx.rev() is None:
907 wctx = repo[None]
908 files.update(repo.dirstate.walk(scmutil.matchall(repo),
909 subrepos=list(wctx.substate),
910 unknown=True, ignored=True))
911 files.update(wctx.substate)
912 else:
913 files.update(ctx.files())
914 files.update(ctx.substate)
915
916 m = ctx.matchfileset(expr)
917 for f in sorted(files):
918 if not m(f):
919 continue
898 ui.write("%s\n" % f)
920 ui.write("%s\n" % f)
899
921
900 @command('debugformat',
922 @command('debugformat',
@@ -971,7 +993,7 b' def debugfsinfo(ui, path="."):'
971 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
993 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
972 casesensitive = '(unknown)'
994 casesensitive = '(unknown)'
973 try:
995 try:
974 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
996 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
975 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
997 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
976 except OSError:
998 except OSError:
977 pass
999 pass
@@ -1143,7 +1165,7 b' def debuginstall(ui, **opts):'
1143 opts = pycompat.byteskwargs(opts)
1165 opts = pycompat.byteskwargs(opts)
1144
1166
1145 def writetemp(contents):
1167 def writetemp(contents):
1146 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1168 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1147 f = os.fdopen(fd, r"wb")
1169 f = os.fdopen(fd, r"wb")
1148 f.write(contents)
1170 f.write(contents)
1149 f.close()
1171 f.close()
@@ -1597,7 +1619,7 b' def debugobsolete(ui, repo, precursor=No'
1597 if opts['rev']:
1619 if opts['rev']:
1598 raise error.Abort('cannot select revision when creating marker')
1620 raise error.Abort('cannot select revision when creating marker')
1599 metadata = {}
1621 metadata = {}
1600 metadata['user'] = opts['user'] or ui.username()
1622 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1601 succs = tuple(parsenodeid(succ) for succ in successors)
1623 succs = tuple(parsenodeid(succ) for succ in successors)
1602 l = repo.lock()
1624 l = repo.lock()
1603 try:
1625 try:
@@ -2237,8 +2259,8 b' def debugrevspec(ui, repo, expr, **opts)'
2237 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2259 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2238 brevs = revset.makematcher(treebystage['optimized'])(repo)
2260 brevs = revset.makematcher(treebystage['optimized'])(repo)
2239 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2261 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2240 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2262 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2241 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2263 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2242 arevs = list(arevs)
2264 arevs = list(arevs)
2243 brevs = list(brevs)
2265 brevs = list(brevs)
2244 if arevs == brevs:
2266 if arevs == brevs:
@@ -2261,7 +2283,7 b' def debugrevspec(ui, repo, expr, **opts)'
2261 func = revset.makematcher(tree)
2283 func = revset.makematcher(tree)
2262 revs = func(repo)
2284 revs = func(repo)
2263 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2285 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2264 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2286 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2265 if not opts['show_revs']:
2287 if not opts['show_revs']:
2266 return
2288 return
2267 for c in revs:
2289 for c in revs:
@@ -2291,7 +2313,13 b' def debugserve(ui, repo, **opts):'
2291
2313
2292 if opts['logiofd']:
2314 if opts['logiofd']:
2293 # Line buffered because output is line based.
2315 # Line buffered because output is line based.
2294 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2316 try:
2317 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2318 except OSError as e:
2319 if e.errno != errno.ESPIPE:
2320 raise
2321 # can't seek a pipe, so `ab` mode fails on py3
2322 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2295 elif opts['logiofile']:
2323 elif opts['logiofile']:
2296 logfh = open(opts['logiofile'], 'ab', 1)
2324 logfh = open(opts['logiofile'], 'ab', 1)
2297
2325
@@ -2484,9 +2512,17 b' def debugtemplate(ui, repo, tmpl, **opts'
2484 if revs is None:
2512 if revs is None:
2485 tres = formatter.templateresources(ui, repo)
2513 tres = formatter.templateresources(ui, repo)
2486 t = formatter.maketemplater(ui, tmpl, resources=tres)
2514 t = formatter.maketemplater(ui, tmpl, resources=tres)
2515 if ui.verbose:
2516 kwds, funcs = t.symbolsuseddefault()
2517 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2518 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2487 ui.write(t.renderdefault(props))
2519 ui.write(t.renderdefault(props))
2488 else:
2520 else:
2489 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2521 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2522 if ui.verbose:
2523 kwds, funcs = displayer.t.symbolsuseddefault()
2524 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2525 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2490 for r in revs:
2526 for r in revs:
2491 displayer.show(repo[r], **pycompat.strkwargs(props))
2527 displayer.show(repo[r], **pycompat.strkwargs(props))
2492 displayer.close()
2528 displayer.close()
@@ -2544,7 +2580,8 b' def debugwalk(ui, repo, *pats, **opts):'
2544 """show how files match on given patterns"""
2580 """show how files match on given patterns"""
2545 opts = pycompat.byteskwargs(opts)
2581 opts = pycompat.byteskwargs(opts)
2546 m = scmutil.match(repo[None], pats, opts)
2582 m = scmutil.match(repo[None], pats, opts)
2547 ui.write(('matcher: %r\n' % m))
2583 if ui.verbose:
2584 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2548 items = list(repo[None].walk(m))
2585 items = list(repo[None].walk(m))
2549 if not items:
2586 if not items:
2550 return
2587 return
@@ -3018,10 +3055,12 b' def debugwireproto(ui, repo, path=None, '
3018
3055
3019 if isinstance(res, wireprotov2peer.commandresponse):
3056 if isinstance(res, wireprotov2peer.commandresponse):
3020 val = list(res.cborobjects())
3057 val = list(res.cborobjects())
3021 ui.status(_('response: %s\n') % stringutil.pprint(val))
3058 ui.status(_('response: %s\n') %
3059 stringutil.pprint(val, bprefix=True))
3022
3060
3023 else:
3061 else:
3024 ui.status(_('response: %s\n') % stringutil.pprint(res))
3062 ui.status(_('response: %s\n') %
3063 stringutil.pprint(res, bprefix=True))
3025
3064
3026 elif action == 'batchbegin':
3065 elif action == 'batchbegin':
3027 if batchedcommands is not None:
3066 if batchedcommands is not None:
@@ -3093,7 +3132,8 b' def debugwireproto(ui, repo, path=None, '
3093 continue
3132 continue
3094
3133
3095 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3134 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3096 ui.write(_('cbor> %s\n') % stringutil.pprint(cbor.loads(body)))
3135 ui.write(_('cbor> %s\n') %
3136 stringutil.pprint(cbor.loads(body), bprefix=True))
3097
3137
3098 elif action == 'close':
3138 elif action == 'close':
3099 peer.close()
3139 peer.close()
1 NO CONTENT: file copied from mercurial/patch.py to mercurial/diffutil.py
NO CONTENT: file copied from mercurial/patch.py to mercurial/diffutil.py
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file renamed from tests/test-command-template.t to tests/test-template-basic.t
NO CONTENT: file renamed from tests/test-command-template.t to tests/test-template-basic.t
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from tests/test-command-template.t to tests/test-template-functions.t
NO CONTENT: file copied from tests/test-command-template.t to tests/test-template-functions.t
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from tests/test-command-template.t to tests/test-template-keywords.t
NO CONTENT: file copied from tests/test-command-template.t to tests/test-template-keywords.t
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from tests/test-command-template.t to tests/test-template-map.t
NO CONTENT: file copied from tests/test-command-template.t to tests/test-template-map.t
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now