##// END OF EJS Templates
copies: add config option for writing copy metadata to file and/or changeset...
Martin von Zweigbergk -
r42317:0e41f40b default
parent child Browse files
Show More
@@ -0,0 +1,105
1
2 $ cat >> $HGRCPATH << EOF
3 > [experimental]
4 > copies.write-to=changeset-only
5 > [alias]
6 > changesetcopies = log -r . -T 'files: {files}
7 > {extras % "{ifcontains("copies", key, "{key}: {value}\n")}"}'
8 > EOF
9
10 Check that copies are recorded correctly
11
12 $ hg init repo
13 $ cd repo
14 $ echo a > a
15 $ hg add a
16 $ hg ci -m initial
17 $ hg cp a b
18 $ hg cp a c
19 $ hg cp a d
20 $ hg ci -m 'copy a to b, c, and d'
21 $ hg changesetcopies
22 files: b c d
23 p1copies: b\x00a (esc)
24 c\x00a (esc)
25 d\x00a (esc)
26
27 Check that renames are recorded correctly
28
29 $ hg mv b b2
30 $ hg ci -m 'rename b to b2'
31 $ hg changesetcopies
32 files: b b2
33 p1copies: b2\x00b (esc)
34
35 Copy onto an existing file. This should get recorded in the changeset files list and in the extras,
36 even though there is no filelog entry.
37
38 $ hg cp b2 c --force
39 $ hg st --copies
40 M c
41 b2
42 $ hg debugindex c
43 rev linkrev nodeid p1 p2
44 0 1 b789fdd96dc2 000000000000 000000000000
45 $ hg ci -m 'move b onto d'
46 $ hg changesetcopies
47 files: c
48 p1copies: c\x00b2 (esc)
49 $ hg debugindex c
50 rev linkrev nodeid p1 p2
51 0 1 b789fdd96dc2 000000000000 000000000000
52
53 Create a merge commit with copying done during merge.
54
55 $ hg co 0
56 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
57 $ hg cp a e
58 $ hg cp a f
59 $ hg ci -m 'copy a to e and f'
60 created new head
61 $ hg merge 3
62 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
63 (branch merge, don't forget to commit)
64 File 'a' exists on both sides, so 'g' could be recorded as being from p1 or p2, but we currently
65 always record it as being from p1
66 $ hg cp a g
67 File 'd' exists only in p2, so 'h' should be from p2
68 $ hg cp d h
69 File 'f' exists only in p1, so 'i' should be from p1
70 $ hg cp f i
71 $ hg ci -m 'merge'
72 $ hg changesetcopies
73 files: g h i
74 p1copies: g\x00a (esc)
75 i\x00f (esc)
76 p2copies: h\x00d (esc)
77
78 Test writing to both changeset and filelog
79
80 $ hg cp a j
81 $ hg ci -m 'copy a to j' --config experimental.copies.write-to=compatibility
82 $ hg changesetcopies
83 files: j
84 p1copies: j\x00a (esc)
85 $ hg debugdata j 0
86 \x01 (esc)
87 copy: a
88 copyrev: b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
89 \x01 (esc)
90 a
91
92 Test writing only to filelog
93
94 $ hg cp a k
95 $ hg ci -m 'copy a to k' --config experimental.copies.write-to=filelog-only
96 $ hg changesetcopies
97 files: k
98 $ hg debugdata k 0
99 \x01 (esc)
100 copy: a
101 copyrev: b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
102 \x01 (esc)
103 a
104
105 $ cd ..
@@ -1,585 +1,599
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import (
11 from .node import (
12 bin,
12 bin,
13 hex,
13 hex,
14 nullid,
14 nullid,
15 )
15 )
16 from .thirdparty import (
16 from .thirdparty import (
17 attr,
17 attr,
18 )
18 )
19
19
20 from . import (
20 from . import (
21 encoding,
21 encoding,
22 error,
22 error,
23 pycompat,
23 pycompat,
24 revlog,
24 revlog,
25 util,
25 util,
26 )
26 )
27 from .utils import (
27 from .utils import (
28 dateutil,
28 dateutil,
29 stringutil,
29 stringutil,
30 )
30 )
31
31
32 _defaultextra = {'branch': 'default'}
32 _defaultextra = {'branch': 'default'}
33
33
34 def _string_escape(text):
34 def _string_escape(text):
35 """
35 """
36 >>> from .pycompat import bytechr as chr
36 >>> from .pycompat import bytechr as chr
37 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
37 >>> d = {b'nl': chr(10), b'bs': chr(92), b'cr': chr(13), b'nul': chr(0)}
38 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
38 >>> s = b"ab%(nl)scd%(bs)s%(bs)sn%(nul)s12ab%(cr)scd%(bs)s%(nl)s" % d
39 >>> s
39 >>> s
40 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
40 'ab\\ncd\\\\\\\\n\\x0012ab\\rcd\\\\\\n'
41 >>> res = _string_escape(s)
41 >>> res = _string_escape(s)
42 >>> s == _string_unescape(res)
42 >>> s == _string_unescape(res)
43 True
43 True
44 """
44 """
45 # subset of the string_escape codec
45 # subset of the string_escape codec
46 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
46 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
47 return text.replace('\0', '\\0')
47 return text.replace('\0', '\\0')
48
48
def _string_unescape(text):
    """Reverse _string_escape(), restoring NUL bytes before the generic pass."""
    if '\\0' in text:
        # fix up \0 without getting into trouble with \\0: tag every
        # escaped backslash with a newline (which cannot appear here),
        # substitute the NULs, then strip the tags again
        text = text.replace('\\\\', '\\\\\n')
        text = text.replace('\\0', '\0')
        text = text.replace('\n', '')
    return stringutil.unescapestr(text)
56
56
def decodeextra(text):
    """
    >>> from .pycompat import bytechr as chr
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar', b'baz': chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')]
    >>> sorted(decodeextra(encodeextra({b'foo': b'bar',
    ...                                 b'baz': chr(92) + chr(0) + b'2'})
    ...                    ).items())
    [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')]
    """
    # start from a copy so callers never mutate the shared default dict
    extra = _defaultextra.copy()
    for chunk in text.split('\0'):
        if not chunk:
            continue
        # only the first ':' separates key from value; values may contain ':'
        key, value = _string_unescape(chunk).split(':', 1)
        extra[key] = value
    return extra
74
74
def encodeextra(d):
    """Serialize an extras dict into the NUL-separated changelog form."""
    # keys must be sorted to produce a deterministic changelog entry
    return "\0".join(
        _string_escape('%s:%s' % (key, pycompat.bytestr(d[key])))
        for key in sorted(d))
82
82
def encodecopies(copies):
    """Serialize a {dest: source} copy map for storage in changeset extras.

    Each entry becomes ``dest\\0source``; entries are sorted by destination
    and joined with newlines so the encoding is deterministic.
    """
    return "\n".join('%s\0%s' % (dest, copies[dest])
                     for dest in sorted(copies))
89
def stripdesc(desc):
    """strip trailing whitespace and leading and trailing empty lines"""
    cleaned = [line.rstrip() for line in desc.splitlines()]
    return '\n'.join(cleaned).strip('\n')
86
93
class appender(object):
    '''the changelog index must be updated last on disk, so we use this class
    to delay writes to it'''
    def __init__(self, vfs, name, mode, buf):
        # buf holds written chunks that have not reached the real file yet
        self.data = buf
        fp = vfs(name, mode)
        self.fp = fp
        # current virtual position within (real file + buffered data)
        self.offset = fp.tell()
        # size of the on-disk portion; everything past it lives in self.data
        self.size = vfs.fstat(fp).st_size
        self._end = self.size

    def end(self):
        # virtual end: on-disk size plus everything buffered so far
        return self._end
    def tell(self):
        return self.offset
    def flush(self):
        # writes are deliberately held in memory; nothing to flush
        pass

    @property
    def closed(self):
        return self.fp.closed

    def close(self):
        self.fp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and data'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset
        # only reposition the real file when the target is inside it;
        # positions past self.size are resolved against self.data on read
        if self.offset < self.size:
            self.fp.seek(self.offset)

    def read(self, count=-1):
        '''only trick here is reads that span real file and data'''
        ret = ""
        if self.offset < self.size:
            # serve as much as possible from the on-disk portion first
            s = self.fp.read(count)
            ret = s
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            # remainder comes from the buffer: collapse all buffered
            # chunks into one string so it can be sliced directly
            doff = self.offset - self.size
            self.data.insert(0, "".join(self.data))
            del self.data[1:]
            s = self.data[0][doff:doff + count]
            self.offset += len(s)
            ret += s
        return ret

    def write(self, s):
        # appends always go to the in-memory buffer, never to disk
        self.data.append(bytes(s))
        self.offset += len(s)
        self._end += len(s)

    def __enter__(self):
        self.fp.__enter__()
        return self

    def __exit__(self, *args):
        return self.fp.__exit__(*args)
152
159
153 def _divertopener(opener, target):
160 def _divertopener(opener, target):
154 """build an opener that writes in 'target.a' instead of 'target'"""
161 """build an opener that writes in 'target.a' instead of 'target'"""
155 def _divert(name, mode='r', checkambig=False):
162 def _divert(name, mode='r', checkambig=False):
156 if name != target:
163 if name != target:
157 return opener(name, mode)
164 return opener(name, mode)
158 return opener(name + ".a", mode)
165 return opener(name + ".a", mode)
159 return _divert
166 return _divert
160
167
161 def _delayopener(opener, target, buf):
168 def _delayopener(opener, target, buf):
162 """build an opener that stores chunks in 'buf' instead of 'target'"""
169 """build an opener that stores chunks in 'buf' instead of 'target'"""
163 def _delay(name, mode='r', checkambig=False):
170 def _delay(name, mode='r', checkambig=False):
164 if name != target:
171 if name != target:
165 return opener(name, mode)
172 return opener(name, mode)
166 return appender(opener, name, mode, buf)
173 return appender(opener, name, mode, buf)
167 return _delay
174 return _delay
168
175
@attr.s
class _changelogrevision(object):
    """Lightweight value object holding the fields of a changelog revision.

    Returned by ``changelogrevision.__new__`` for the empty/null revision,
    with every field at its default.
    """
    # Extensions might modify _defaultextra, so let the constructor below pass
    # it in
    extra = attr.ib()
    manifest = attr.ib(default=nullid)
    user = attr.ib(default='')
    date = attr.ib(default=(0, 0))
    files = attr.ib(default=attr.Factory(list))
    description = attr.ib(default='')
179
186
class changelogrevision(object):
    """Holds results of a parsed changelog revision.

    Changelog revisions consist of multiple pieces of data, including
    the manifest node, user, and date. This object exposes a view into
    the parsed object.
    """

    # instances are created for every parsed revision; avoid a per-instance
    # __dict__ to keep them small
    __slots__ = (
        r'_offsets',
        r'_text',
    )

    def __new__(cls, text):
        if not text:
            # the null revision parses to all-default field values
            return _changelogrevision(extra=_defaultextra)

        self = super(changelogrevision, cls).__new__(cls)
        # We could return here and implement the following as an __init__.
        # But doing it here is equivalent and saves an extra function call.

        # format used:
        # nodeid\n        : manifest node in ascii
        # user\n          : user, no \n or \r allowed
        # time tz extra\n : date (time is int or float, timezone is int)
        #                 : extra is metadata, encoded and separated by '\0'
        #                 : older versions ignore it
        # files\n\n       : files modified by the cset, no \n or \r allowed
        # (.*)            : comment (free text, ideally utf-8)
        #
        # changelog v0 doesn't use extra

        nl1 = text.index('\n')
        nl2 = text.index('\n', nl1 + 1)
        nl3 = text.index('\n', nl2 + 1)

        # The list of files may be empty. Which means nl3 is the first of the
        # double newline that precedes the description.
        if text[nl3 + 1:nl3 + 2] == '\n':
            doublenl = nl3
        else:
            doublenl = text.index('\n\n', nl3 + 1)

        # parsing is lazy: only the newline offsets are computed up front,
        # each property slices the raw text on demand
        self._offsets = (nl1, nl2, nl3, doublenl)
        self._text = text

        return self

    @property
    def manifest(self):
        return bin(self._text[0:self._offsets[0]])

    @property
    def user(self):
        off = self._offsets
        return encoding.tolocal(self._text[off[0] + 1:off[1]])

    @property
    def _rawdate(self):
        # the "time tz" prefix of the third line, still as raw bytes
        off = self._offsets
        dateextra = self._text[off[1] + 1:off[2]]
        return dateextra.split(' ', 2)[0:2]

    @property
    def _rawextra(self):
        # the still-encoded extra field, or None if the revision has none
        off = self._offsets
        dateextra = self._text[off[1] + 1:off[2]]
        fields = dateextra.split(' ', 2)
        if len(fields) != 3:
            return None

        return fields[2]

    @property
    def date(self):
        raw = self._rawdate
        time = float(raw[0])
        # Various tools did silly things with the timezone.
        try:
            timezone = int(raw[1])
        except ValueError:
            timezone = 0

        return time, timezone

    @property
    def extra(self):
        raw = self._rawextra
        if raw is None:
            return _defaultextra

        return decodeextra(raw)

    @property
    def files(self):
        off = self._offsets
        if off[2] == off[3]:
            # empty files list: the double newline starts right at nl3
            return []

        return self._text[off[2] + 1:off[3]].split('\n')

    @property
    def description(self):
        # +2 skips the double newline separating files from the comment
        return encoding.tolocal(self._text[self._offsets[3] + 2:])
284
291
class changelog(revlog.revlog):
    def __init__(self, opener, trypending=False):
        """Load a changelog revlog using an opener.

        If ``trypending`` is true, we attempt to load the index from a
        ``00changelog.i.a`` file instead of the default ``00changelog.i``.
        The ``00changelog.i.a`` file contains index (and possibly inline
        revision) data for a transaction that hasn't been finalized yet.
        It exists in a separate file to facilitate readers (such as
        hooks processes) accessing data before a transaction is finalized.
        """
        if trypending and opener.exists('00changelog.i.a'):
            indexfile = '00changelog.i.a'
        else:
            indexfile = '00changelog.i'

        datafile = '00changelog.d'
        revlog.revlog.__init__(self, opener, indexfile, datafile=datafile,
                               checkambig=True, mmaplargeindex=True)

        if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
            # changelogs don't benefit from generaldelta.

            self.version &= ~revlog.FLAG_GENERALDELTA
            self._generaldelta = False

        # Delta chains for changelogs tend to be very small because entries
        # tend to be small and don't delta well with each. So disable delta
        # chains.
        self._storedeltachains = False

        # state for delayupdate()/_writepending()/_finalize() below
        self._realopener = opener
        self._delayed = False
        self._delaybuf = None
        self._divert = False
        self.filteredrevs = frozenset()

    def tiprev(self):
        # walk down from the top; nullrev (-1) is never filtered, so the
        # loop always terminates with a valid answer
        for i in pycompat.xrange(len(self) -1, -2, -1):
            if i not in self.filteredrevs:
                return i

    def tip(self):
        """filtered version of revlog.tip"""
        return self.node(self.tiprev())

    def __contains__(self, rev):
        """filtered version of revlog.__contains__"""
        return (0 <= rev < len(self)
                and rev not in self.filteredrevs)

    def __iter__(self):
        """filtered version of revlog.__iter__"""
        if len(self.filteredrevs) == 0:
            # fast path: nothing is filtered
            return revlog.revlog.__iter__(self)

        def filterediter():
            for i in pycompat.xrange(len(self)):
                if i not in self.filteredrevs:
                    yield i

        return filterediter()

    def revs(self, start=0, stop=None):
        """filtered version of revlog.revs"""
        for i in super(changelog, self).revs(start, stop):
            if i not in self.filteredrevs:
                yield i

    def reachableroots(self, minroot, heads, roots, includepath=False):
        return self.index.reachableroots2(minroot, heads, roots, includepath)

    def _checknofilteredinrevs(self, revs):
        """raise the appropriate error if 'revs' contains a filtered revision

        This returns a version of 'revs' to be used thereafter by the caller.
        In particular, if revs is an iterator, it is converted into a set.
        """
        safehasattr = util.safehasattr
        if safehasattr(revs, '__next__'):
            # Note that inspect.isgenerator() is not true for iterators,
            revs = set(revs)

        filteredrevs = self.filteredrevs
        if safehasattr(revs, 'first'): # smartset
            offenders = revs & filteredrevs
        else:
            offenders = filteredrevs.intersection(revs)

        for rev in offenders:
            raise error.FilteredIndexError(rev)
        return revs

    def headrevs(self, revs=None):
        if revs is None and self.filteredrevs:
            try:
                return self.index.headrevsfiltered(self.filteredrevs)
            # AttributeError covers non-c-extension environments and
            # old c extensions without filter handling.
            except AttributeError:
                return self._headrevs()

        if self.filteredrevs:
            revs = self._checknofilteredinrevs(revs)
        return super(changelog, self).headrevs(revs)

    def strip(self, *args, **kwargs):
        # XXX make something better than assert
        # We can't expect proper strip behavior if we are filtered.
        assert not self.filteredrevs
        super(changelog, self).strip(*args, **kwargs)

    def rev(self, node):
        """filtered version of revlog.rev"""
        r = super(changelog, self).rev(node)
        if r in self.filteredrevs:
            raise error.FilteredLookupError(hex(node), self.indexfile,
                                            _('filtered node'))
        return r

    def node(self, rev):
        """filtered version of revlog.node"""
        if rev in self.filteredrevs:
            raise error.FilteredIndexError(rev)
        return super(changelog, self).node(rev)

    def linkrev(self, rev):
        """filtered version of revlog.linkrev"""
        if rev in self.filteredrevs:
            raise error.FilteredIndexError(rev)
        return super(changelog, self).linkrev(rev)

    def parentrevs(self, rev):
        """filtered version of revlog.parentrevs"""
        if rev in self.filteredrevs:
            raise error.FilteredIndexError(rev)
        return super(changelog, self).parentrevs(rev)

    def flags(self, rev):
        """filtered version of revlog.flags"""
        if rev in self.filteredrevs:
            raise error.FilteredIndexError(rev)
        return super(changelog, self).flags(rev)

    def delayupdate(self, tr):
        "delay visibility of index updates to other readers"

        if not self._delayed:
            if len(self) == 0:
                # brand-new changelog: write straight to a diverted
                # '.a' file and atomically rename it at finalize time
                self._divert = True
                if self._realopener.exists(self.indexfile + '.a'):
                    self._realopener.unlink(self.indexfile + '.a')
                self.opener = _divertopener(self._realopener, self.indexfile)
            else:
                # existing changelog: buffer the new data in memory
                self._delaybuf = []
                self.opener = _delayopener(self._realopener, self.indexfile,
                                           self._delaybuf)
        self._delayed = True
        tr.addpending('cl-%i' % id(self), self._writepending)
        tr.addfinalize('cl-%i' % id(self), self._finalize)

    def _finalize(self, tr):
        "finalize index updates"
        self._delayed = False
        self.opener = self._realopener
        # move redirected index data back into place
        if self._divert:
            assert not self._delaybuf
            tmpname = self.indexfile + ".a"
            nfile = self.opener.open(tmpname)
            nfile.close()
            self.opener.rename(tmpname, self.indexfile, checkambig=True)
        elif self._delaybuf:
            # append the buffered entries to the now-visible index
            fp = self.opener(self.indexfile, 'a', checkambig=True)
            fp.write("".join(self._delaybuf))
            fp.close()
            self._delaybuf = None
        self._divert = False
        # split when we're done
        self._enforceinlinesize(tr)

    def _writepending(self, tr):
        "create a file containing the unfinalized state for pretxnchangegroup"
        if self._delaybuf:
            # make a temporary copy of the index
            fp1 = self._realopener(self.indexfile)
            pendingfilename = self.indexfile + ".a"
            # register as a temp file to ensure cleanup on failure
            tr.registertmp(pendingfilename)
            # write existing data
            fp2 = self._realopener(pendingfilename, "w")
            fp2.write(fp1.read())
            # add pending data
            fp2.write("".join(self._delaybuf))
            fp2.close()
            # switch modes so finalize can simply rename
            self._delaybuf = None
            self._divert = True
            self.opener = _divertopener(self._realopener, self.indexfile)

        if self._divert:
            return True

        return False

    def _enforceinlinesize(self, tr, fp=None):
        # while delayed, the inline/split decision must wait for _finalize()
        if not self._delayed:
            revlog.revlog._enforceinlinesize(self, tr, fp)

    def read(self, node):
        """Obtain data from a parsed changelog revision.

        Returns a 6-tuple of:

           - manifest node in binary
           - author/user as a localstr
           - date as a 2-tuple of (time, timezone)
           - list of files
           - commit message as a localstr
           - dict of extra metadata

        Unless you need to access all fields, consider calling
        ``changelogrevision`` instead, as it is faster for partial object
        access.
        """
        c = changelogrevision(self.revision(node))
        return (
            c.manifest,
            c.user,
            c.date,
            c.files,
            c.description,
            c.extra
        )

    def changelogrevision(self, nodeorrev):
        """Obtain a ``changelogrevision`` for a node or revision."""
        return changelogrevision(self.revision(nodeorrev))

    def readfiles(self, node):
        """
        short version of read that only returns the files modified by the cset
        """
        text = self.revision(node)
        if not text:
            return []
        last = text.index("\n\n")
        l = text[:last].split('\n')
        # first three lines are manifest, user and date; the rest are files
        return l[3:]
535 def add(self, manifest, files, desc, transaction, p1, p2,
542 def add(self, manifest, files, desc, transaction, p1, p2,
536 user, date=None, extra=None):
543 user, date=None, extra=None, p1copies=None, p2copies=None):
537 # Convert to UTF-8 encoded bytestrings as the very first
544 # Convert to UTF-8 encoded bytestrings as the very first
538 # thing: calling any method on a localstr object will turn it
545 # thing: calling any method on a localstr object will turn it
539 # into a str object and the cached UTF-8 string is thus lost.
546 # into a str object and the cached UTF-8 string is thus lost.
540 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
547 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
541
548
542 user = user.strip()
549 user = user.strip()
543 # An empty username or a username with a "\n" will make the
550 # An empty username or a username with a "\n" will make the
544 # revision text contain two "\n\n" sequences -> corrupt
551 # revision text contain two "\n\n" sequences -> corrupt
545 # repository since read cannot unpack the revision.
552 # repository since read cannot unpack the revision.
546 if not user:
553 if not user:
547 raise error.StorageError(_("empty username"))
554 raise error.StorageError(_("empty username"))
548 if "\n" in user:
555 if "\n" in user:
549 raise error.StorageError(_("username %r contains a newline")
556 raise error.StorageError(_("username %r contains a newline")
550 % pycompat.bytestr(user))
557 % pycompat.bytestr(user))
551
558
552 desc = stripdesc(desc)
559 desc = stripdesc(desc)
553
560
554 if date:
561 if date:
555 parseddate = "%d %d" % dateutil.parsedate(date)
562 parseddate = "%d %d" % dateutil.parsedate(date)
556 else:
563 else:
557 parseddate = "%d %d" % dateutil.makedate()
564 parseddate = "%d %d" % dateutil.makedate()
558 if extra:
565 if extra:
559 branch = extra.get("branch")
566 branch = extra.get("branch")
560 if branch in ("default", ""):
567 if branch in ("default", ""):
561 del extra["branch"]
568 del extra["branch"]
562 elif branch in (".", "null", "tip"):
569 elif branch in (".", "null", "tip"):
563 raise error.StorageError(_('the name \'%s\' is reserved')
570 raise error.StorageError(_('the name \'%s\' is reserved')
564 % branch)
571 % branch)
572 if (p1copies or p2copies) and extra is None:
573 extra = {}
574 if p1copies:
575 extra['p1copies'] = encodecopies(p1copies)
576 if p2copies:
577 extra['p2copies'] = encodecopies(p2copies)
578
565 if extra:
579 if extra:
566 extra = encodeextra(extra)
580 extra = encodeextra(extra)
567 parseddate = "%s %s" % (parseddate, extra)
581 parseddate = "%s %s" % (parseddate, extra)
568 l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
582 l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
569 text = "\n".join(l)
583 text = "\n".join(l)
570 return self.addrevision(text, transaction, len(self), p1, p2)
584 return self.addrevision(text, transaction, len(self), p1, p2)
571
585
572 def branchinfo(self, rev):
586 def branchinfo(self, rev):
573 """return the branch name and open/close state of a revision
587 """return the branch name and open/close state of a revision
574
588
575 This function exists because creating a changectx object
589 This function exists because creating a changectx object
576 just to access this is costly."""
590 just to access this is costly."""
577 extra = self.read(rev)[5]
591 extra = self.read(rev)[5]
578 return encoding.tolocal(extra.get("branch")), 'close' in extra
592 return encoding.tolocal(extra.get("branch")), 'close' in extra
579
593
580 def _nodeduplicatecallback(self, transaction, node):
594 def _nodeduplicatecallback(self, transaction, node):
581 # keep track of revisions that got "re-added", eg: unbunde of know rev.
595 # keep track of revisions that got "re-added", eg: unbunde of know rev.
582 #
596 #
583 # We track them in a list to preserve their order from the source bundle
597 # We track them in a list to preserve their order from the source bundle
584 duplicates = transaction.changes.setdefault('revduplicates', [])
598 duplicates = transaction.changes.setdefault('revduplicates', [])
585 duplicates.append(self.rev(node))
599 duplicates.append(self.rev(node))
@@ -1,1474 +1,1477
1 # configitems.py - centralized declaration of configuration option
1 # configitems.py - centralized declaration of configuration option
2 #
2 #
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import functools
10 import functools
11 import re
11 import re
12
12
13 from . import (
13 from . import (
14 encoding,
14 encoding,
15 error,
15 error,
16 )
16 )
17
17
18 def loadconfigtable(ui, extname, configtable):
18 def loadconfigtable(ui, extname, configtable):
19 """update config item known to the ui with the extension ones"""
19 """update config item known to the ui with the extension ones"""
20 for section, items in sorted(configtable.items()):
20 for section, items in sorted(configtable.items()):
21 knownitems = ui._knownconfig.setdefault(section, itemregister())
21 knownitems = ui._knownconfig.setdefault(section, itemregister())
22 knownkeys = set(knownitems)
22 knownkeys = set(knownitems)
23 newkeys = set(items)
23 newkeys = set(items)
24 for key in sorted(knownkeys & newkeys):
24 for key in sorted(knownkeys & newkeys):
25 msg = "extension '%s' overwrite config item '%s.%s'"
25 msg = "extension '%s' overwrite config item '%s.%s'"
26 msg %= (extname, section, key)
26 msg %= (extname, section, key)
27 ui.develwarn(msg, config='warn-config')
27 ui.develwarn(msg, config='warn-config')
28
28
29 knownitems.update(items)
29 knownitems.update(items)
30
30
31 class configitem(object):
31 class configitem(object):
32 """represent a known config item
32 """represent a known config item
33
33
34 :section: the official config section where to find this item,
34 :section: the official config section where to find this item,
35 :name: the official name within the section,
35 :name: the official name within the section,
36 :default: default value for this item,
36 :default: default value for this item,
37 :alias: optional list of tuples as alternatives,
37 :alias: optional list of tuples as alternatives,
38 :generic: this is a generic definition, match name using regular expression.
38 :generic: this is a generic definition, match name using regular expression.
39 """
39 """
40
40
41 def __init__(self, section, name, default=None, alias=(),
41 def __init__(self, section, name, default=None, alias=(),
42 generic=False, priority=0):
42 generic=False, priority=0):
43 self.section = section
43 self.section = section
44 self.name = name
44 self.name = name
45 self.default = default
45 self.default = default
46 self.alias = list(alias)
46 self.alias = list(alias)
47 self.generic = generic
47 self.generic = generic
48 self.priority = priority
48 self.priority = priority
49 self._re = None
49 self._re = None
50 if generic:
50 if generic:
51 self._re = re.compile(self.name)
51 self._re = re.compile(self.name)
52
52
53 class itemregister(dict):
53 class itemregister(dict):
54 """A specialized dictionary that can handle wild-card selection"""
54 """A specialized dictionary that can handle wild-card selection"""
55
55
56 def __init__(self):
56 def __init__(self):
57 super(itemregister, self).__init__()
57 super(itemregister, self).__init__()
58 self._generics = set()
58 self._generics = set()
59
59
60 def update(self, other):
60 def update(self, other):
61 super(itemregister, self).update(other)
61 super(itemregister, self).update(other)
62 self._generics.update(other._generics)
62 self._generics.update(other._generics)
63
63
64 def __setitem__(self, key, item):
64 def __setitem__(self, key, item):
65 super(itemregister, self).__setitem__(key, item)
65 super(itemregister, self).__setitem__(key, item)
66 if item.generic:
66 if item.generic:
67 self._generics.add(item)
67 self._generics.add(item)
68
68
69 def get(self, key):
69 def get(self, key):
70 baseitem = super(itemregister, self).get(key)
70 baseitem = super(itemregister, self).get(key)
71 if baseitem is not None and not baseitem.generic:
71 if baseitem is not None and not baseitem.generic:
72 return baseitem
72 return baseitem
73
73
74 # search for a matching generic item
74 # search for a matching generic item
75 generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
75 generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
76 for item in generics:
76 for item in generics:
77 # we use 'match' instead of 'search' to make the matching simpler
77 # we use 'match' instead of 'search' to make the matching simpler
78 # for people unfamiliar with regular expression. Having the match
78 # for people unfamiliar with regular expression. Having the match
79 # rooted to the start of the string will produce less surprising
79 # rooted to the start of the string will produce less surprising
80 # result for user writing simple regex for sub-attribute.
80 # result for user writing simple regex for sub-attribute.
81 #
81 #
82 # For example using "color\..*" match produces an unsurprising
82 # For example using "color\..*" match produces an unsurprising
83 # result, while using search could suddenly match apparently
83 # result, while using search could suddenly match apparently
84 # unrelated configuration that happens to contains "color."
84 # unrelated configuration that happens to contains "color."
85 # anywhere. This is a tradeoff where we favor requiring ".*" on
85 # anywhere. This is a tradeoff where we favor requiring ".*" on
86 # some match to avoid the need to prefix most pattern with "^".
86 # some match to avoid the need to prefix most pattern with "^".
87 # The "^" seems more error prone.
87 # The "^" seems more error prone.
88 if item._re.match(key):
88 if item._re.match(key):
89 return item
89 return item
90
90
91 return None
91 return None
92
92
93 coreitems = {}
93 coreitems = {}
94
94
95 def _register(configtable, *args, **kwargs):
95 def _register(configtable, *args, **kwargs):
96 item = configitem(*args, **kwargs)
96 item = configitem(*args, **kwargs)
97 section = configtable.setdefault(item.section, itemregister())
97 section = configtable.setdefault(item.section, itemregister())
98 if item.name in section:
98 if item.name in section:
99 msg = "duplicated config item registration for '%s.%s'"
99 msg = "duplicated config item registration for '%s.%s'"
100 raise error.ProgrammingError(msg % (item.section, item.name))
100 raise error.ProgrammingError(msg % (item.section, item.name))
101 section[item.name] = item
101 section[item.name] = item
102
102
103 # special value for case where the default is derived from other values
103 # special value for case where the default is derived from other values
104 dynamicdefault = object()
104 dynamicdefault = object()
105
105
106 # Registering actual config items
106 # Registering actual config items
107
107
108 def getitemregister(configtable):
108 def getitemregister(configtable):
109 f = functools.partial(_register, configtable)
109 f = functools.partial(_register, configtable)
110 # export pseudo enum as configitem.*
110 # export pseudo enum as configitem.*
111 f.dynamicdefault = dynamicdefault
111 f.dynamicdefault = dynamicdefault
112 return f
112 return f
113
113
114 coreconfigitem = getitemregister(coreitems)
114 coreconfigitem = getitemregister(coreitems)
115
115
116 def _registerdiffopts(section, configprefix=''):
116 def _registerdiffopts(section, configprefix=''):
117 coreconfigitem(section, configprefix + 'nodates',
117 coreconfigitem(section, configprefix + 'nodates',
118 default=False,
118 default=False,
119 )
119 )
120 coreconfigitem(section, configprefix + 'showfunc',
120 coreconfigitem(section, configprefix + 'showfunc',
121 default=False,
121 default=False,
122 )
122 )
123 coreconfigitem(section, configprefix + 'unified',
123 coreconfigitem(section, configprefix + 'unified',
124 default=None,
124 default=None,
125 )
125 )
126 coreconfigitem(section, configprefix + 'git',
126 coreconfigitem(section, configprefix + 'git',
127 default=False,
127 default=False,
128 )
128 )
129 coreconfigitem(section, configprefix + 'ignorews',
129 coreconfigitem(section, configprefix + 'ignorews',
130 default=False,
130 default=False,
131 )
131 )
132 coreconfigitem(section, configprefix + 'ignorewsamount',
132 coreconfigitem(section, configprefix + 'ignorewsamount',
133 default=False,
133 default=False,
134 )
134 )
135 coreconfigitem(section, configprefix + 'ignoreblanklines',
135 coreconfigitem(section, configprefix + 'ignoreblanklines',
136 default=False,
136 default=False,
137 )
137 )
138 coreconfigitem(section, configprefix + 'ignorewseol',
138 coreconfigitem(section, configprefix + 'ignorewseol',
139 default=False,
139 default=False,
140 )
140 )
141 coreconfigitem(section, configprefix + 'nobinary',
141 coreconfigitem(section, configprefix + 'nobinary',
142 default=False,
142 default=False,
143 )
143 )
144 coreconfigitem(section, configprefix + 'noprefix',
144 coreconfigitem(section, configprefix + 'noprefix',
145 default=False,
145 default=False,
146 )
146 )
147 coreconfigitem(section, configprefix + 'word-diff',
147 coreconfigitem(section, configprefix + 'word-diff',
148 default=False,
148 default=False,
149 )
149 )
150
150
151 coreconfigitem('alias', '.*',
151 coreconfigitem('alias', '.*',
152 default=dynamicdefault,
152 default=dynamicdefault,
153 generic=True,
153 generic=True,
154 )
154 )
155 coreconfigitem('auth', 'cookiefile',
155 coreconfigitem('auth', 'cookiefile',
156 default=None,
156 default=None,
157 )
157 )
158 _registerdiffopts(section='annotate')
158 _registerdiffopts(section='annotate')
159 # bookmarks.pushing: internal hack for discovery
159 # bookmarks.pushing: internal hack for discovery
160 coreconfigitem('bookmarks', 'pushing',
160 coreconfigitem('bookmarks', 'pushing',
161 default=list,
161 default=list,
162 )
162 )
163 # bundle.mainreporoot: internal hack for bundlerepo
163 # bundle.mainreporoot: internal hack for bundlerepo
164 coreconfigitem('bundle', 'mainreporoot',
164 coreconfigitem('bundle', 'mainreporoot',
165 default='',
165 default='',
166 )
166 )
167 coreconfigitem('censor', 'policy',
167 coreconfigitem('censor', 'policy',
168 default='abort',
168 default='abort',
169 )
169 )
170 coreconfigitem('chgserver', 'idletimeout',
170 coreconfigitem('chgserver', 'idletimeout',
171 default=3600,
171 default=3600,
172 )
172 )
173 coreconfigitem('chgserver', 'skiphash',
173 coreconfigitem('chgserver', 'skiphash',
174 default=False,
174 default=False,
175 )
175 )
176 coreconfigitem('cmdserver', 'log',
176 coreconfigitem('cmdserver', 'log',
177 default=None,
177 default=None,
178 )
178 )
179 coreconfigitem('cmdserver', 'max-log-files',
179 coreconfigitem('cmdserver', 'max-log-files',
180 default=7,
180 default=7,
181 )
181 )
182 coreconfigitem('cmdserver', 'max-log-size',
182 coreconfigitem('cmdserver', 'max-log-size',
183 default='1 MB',
183 default='1 MB',
184 )
184 )
185 coreconfigitem('cmdserver', 'max-repo-cache',
185 coreconfigitem('cmdserver', 'max-repo-cache',
186 default=0,
186 default=0,
187 )
187 )
188 coreconfigitem('cmdserver', 'message-encodings',
188 coreconfigitem('cmdserver', 'message-encodings',
189 default=list,
189 default=list,
190 )
190 )
191 coreconfigitem('cmdserver', 'track-log',
191 coreconfigitem('cmdserver', 'track-log',
192 default=lambda: ['chgserver', 'cmdserver', 'repocache'],
192 default=lambda: ['chgserver', 'cmdserver', 'repocache'],
193 )
193 )
194 coreconfigitem('color', '.*',
194 coreconfigitem('color', '.*',
195 default=None,
195 default=None,
196 generic=True,
196 generic=True,
197 )
197 )
198 coreconfigitem('color', 'mode',
198 coreconfigitem('color', 'mode',
199 default='auto',
199 default='auto',
200 )
200 )
201 coreconfigitem('color', 'pagermode',
201 coreconfigitem('color', 'pagermode',
202 default=dynamicdefault,
202 default=dynamicdefault,
203 )
203 )
204 _registerdiffopts(section='commands', configprefix='commit.interactive.')
204 _registerdiffopts(section='commands', configprefix='commit.interactive.')
205 coreconfigitem('commands', 'grep.all-files',
205 coreconfigitem('commands', 'grep.all-files',
206 default=False,
206 default=False,
207 )
207 )
208 coreconfigitem('commands', 'resolve.confirm',
208 coreconfigitem('commands', 'resolve.confirm',
209 default=False,
209 default=False,
210 )
210 )
211 coreconfigitem('commands', 'resolve.explicit-re-merge',
211 coreconfigitem('commands', 'resolve.explicit-re-merge',
212 default=False,
212 default=False,
213 )
213 )
214 coreconfigitem('commands', 'resolve.mark-check',
214 coreconfigitem('commands', 'resolve.mark-check',
215 default='none',
215 default='none',
216 )
216 )
217 _registerdiffopts(section='commands', configprefix='revert.interactive.')
217 _registerdiffopts(section='commands', configprefix='revert.interactive.')
218 coreconfigitem('commands', 'show.aliasprefix',
218 coreconfigitem('commands', 'show.aliasprefix',
219 default=list,
219 default=list,
220 )
220 )
221 coreconfigitem('commands', 'status.relative',
221 coreconfigitem('commands', 'status.relative',
222 default=False,
222 default=False,
223 )
223 )
224 coreconfigitem('commands', 'status.skipstates',
224 coreconfigitem('commands', 'status.skipstates',
225 default=[],
225 default=[],
226 )
226 )
227 coreconfigitem('commands', 'status.terse',
227 coreconfigitem('commands', 'status.terse',
228 default='',
228 default='',
229 )
229 )
230 coreconfigitem('commands', 'status.verbose',
230 coreconfigitem('commands', 'status.verbose',
231 default=False,
231 default=False,
232 )
232 )
233 coreconfigitem('commands', 'update.check',
233 coreconfigitem('commands', 'update.check',
234 default=None,
234 default=None,
235 )
235 )
236 coreconfigitem('commands', 'update.requiredest',
236 coreconfigitem('commands', 'update.requiredest',
237 default=False,
237 default=False,
238 )
238 )
239 coreconfigitem('committemplate', '.*',
239 coreconfigitem('committemplate', '.*',
240 default=None,
240 default=None,
241 generic=True,
241 generic=True,
242 )
242 )
243 coreconfigitem('convert', 'bzr.saverev',
243 coreconfigitem('convert', 'bzr.saverev',
244 default=True,
244 default=True,
245 )
245 )
246 coreconfigitem('convert', 'cvsps.cache',
246 coreconfigitem('convert', 'cvsps.cache',
247 default=True,
247 default=True,
248 )
248 )
249 coreconfigitem('convert', 'cvsps.fuzz',
249 coreconfigitem('convert', 'cvsps.fuzz',
250 default=60,
250 default=60,
251 )
251 )
252 coreconfigitem('convert', 'cvsps.logencoding',
252 coreconfigitem('convert', 'cvsps.logencoding',
253 default=None,
253 default=None,
254 )
254 )
255 coreconfigitem('convert', 'cvsps.mergefrom',
255 coreconfigitem('convert', 'cvsps.mergefrom',
256 default=None,
256 default=None,
257 )
257 )
258 coreconfigitem('convert', 'cvsps.mergeto',
258 coreconfigitem('convert', 'cvsps.mergeto',
259 default=None,
259 default=None,
260 )
260 )
261 coreconfigitem('convert', 'git.committeractions',
261 coreconfigitem('convert', 'git.committeractions',
262 default=lambda: ['messagedifferent'],
262 default=lambda: ['messagedifferent'],
263 )
263 )
264 coreconfigitem('convert', 'git.extrakeys',
264 coreconfigitem('convert', 'git.extrakeys',
265 default=list,
265 default=list,
266 )
266 )
267 coreconfigitem('convert', 'git.findcopiesharder',
267 coreconfigitem('convert', 'git.findcopiesharder',
268 default=False,
268 default=False,
269 )
269 )
270 coreconfigitem('convert', 'git.remoteprefix',
270 coreconfigitem('convert', 'git.remoteprefix',
271 default='remote',
271 default='remote',
272 )
272 )
273 coreconfigitem('convert', 'git.renamelimit',
273 coreconfigitem('convert', 'git.renamelimit',
274 default=400,
274 default=400,
275 )
275 )
276 coreconfigitem('convert', 'git.saverev',
276 coreconfigitem('convert', 'git.saverev',
277 default=True,
277 default=True,
278 )
278 )
279 coreconfigitem('convert', 'git.similarity',
279 coreconfigitem('convert', 'git.similarity',
280 default=50,
280 default=50,
281 )
281 )
282 coreconfigitem('convert', 'git.skipsubmodules',
282 coreconfigitem('convert', 'git.skipsubmodules',
283 default=False,
283 default=False,
284 )
284 )
285 coreconfigitem('convert', 'hg.clonebranches',
285 coreconfigitem('convert', 'hg.clonebranches',
286 default=False,
286 default=False,
287 )
287 )
288 coreconfigitem('convert', 'hg.ignoreerrors',
288 coreconfigitem('convert', 'hg.ignoreerrors',
289 default=False,
289 default=False,
290 )
290 )
291 coreconfigitem('convert', 'hg.revs',
291 coreconfigitem('convert', 'hg.revs',
292 default=None,
292 default=None,
293 )
293 )
294 coreconfigitem('convert', 'hg.saverev',
294 coreconfigitem('convert', 'hg.saverev',
295 default=False,
295 default=False,
296 )
296 )
297 coreconfigitem('convert', 'hg.sourcename',
297 coreconfigitem('convert', 'hg.sourcename',
298 default=None,
298 default=None,
299 )
299 )
300 coreconfigitem('convert', 'hg.startrev',
300 coreconfigitem('convert', 'hg.startrev',
301 default=None,
301 default=None,
302 )
302 )
303 coreconfigitem('convert', 'hg.tagsbranch',
303 coreconfigitem('convert', 'hg.tagsbranch',
304 default='default',
304 default='default',
305 )
305 )
306 coreconfigitem('convert', 'hg.usebranchnames',
306 coreconfigitem('convert', 'hg.usebranchnames',
307 default=True,
307 default=True,
308 )
308 )
309 coreconfigitem('convert', 'ignoreancestorcheck',
309 coreconfigitem('convert', 'ignoreancestorcheck',
310 default=False,
310 default=False,
311 )
311 )
312 coreconfigitem('convert', 'localtimezone',
312 coreconfigitem('convert', 'localtimezone',
313 default=False,
313 default=False,
314 )
314 )
315 coreconfigitem('convert', 'p4.encoding',
315 coreconfigitem('convert', 'p4.encoding',
316 default=dynamicdefault,
316 default=dynamicdefault,
317 )
317 )
318 coreconfigitem('convert', 'p4.startrev',
318 coreconfigitem('convert', 'p4.startrev',
319 default=0,
319 default=0,
320 )
320 )
321 coreconfigitem('convert', 'skiptags',
321 coreconfigitem('convert', 'skiptags',
322 default=False,
322 default=False,
323 )
323 )
324 coreconfigitem('convert', 'svn.debugsvnlog',
324 coreconfigitem('convert', 'svn.debugsvnlog',
325 default=True,
325 default=True,
326 )
326 )
327 coreconfigitem('convert', 'svn.trunk',
327 coreconfigitem('convert', 'svn.trunk',
328 default=None,
328 default=None,
329 )
329 )
330 coreconfigitem('convert', 'svn.tags',
330 coreconfigitem('convert', 'svn.tags',
331 default=None,
331 default=None,
332 )
332 )
333 coreconfigitem('convert', 'svn.branches',
333 coreconfigitem('convert', 'svn.branches',
334 default=None,
334 default=None,
335 )
335 )
336 coreconfigitem('convert', 'svn.startrev',
336 coreconfigitem('convert', 'svn.startrev',
337 default=0,
337 default=0,
338 )
338 )
339 coreconfigitem('debug', 'dirstate.delaywrite',
339 coreconfigitem('debug', 'dirstate.delaywrite',
340 default=0,
340 default=0,
341 )
341 )
342 coreconfigitem('defaults', '.*',
342 coreconfigitem('defaults', '.*',
343 default=None,
343 default=None,
344 generic=True,
344 generic=True,
345 )
345 )
346 coreconfigitem('devel', 'all-warnings',
346 coreconfigitem('devel', 'all-warnings',
347 default=False,
347 default=False,
348 )
348 )
349 coreconfigitem('devel', 'bundle2.debug',
349 coreconfigitem('devel', 'bundle2.debug',
350 default=False,
350 default=False,
351 )
351 )
352 coreconfigitem('devel', 'bundle.delta',
352 coreconfigitem('devel', 'bundle.delta',
353 default='',
353 default='',
354 )
354 )
355 coreconfigitem('devel', 'cache-vfs',
355 coreconfigitem('devel', 'cache-vfs',
356 default=None,
356 default=None,
357 )
357 )
358 coreconfigitem('devel', 'check-locks',
358 coreconfigitem('devel', 'check-locks',
359 default=False,
359 default=False,
360 )
360 )
361 coreconfigitem('devel', 'check-relroot',
361 coreconfigitem('devel', 'check-relroot',
362 default=False,
362 default=False,
363 )
363 )
364 coreconfigitem('devel', 'default-date',
364 coreconfigitem('devel', 'default-date',
365 default=None,
365 default=None,
366 )
366 )
367 coreconfigitem('devel', 'deprec-warn',
367 coreconfigitem('devel', 'deprec-warn',
368 default=False,
368 default=False,
369 )
369 )
370 coreconfigitem('devel', 'disableloaddefaultcerts',
370 coreconfigitem('devel', 'disableloaddefaultcerts',
371 default=False,
371 default=False,
372 )
372 )
373 coreconfigitem('devel', 'warn-empty-changegroup',
373 coreconfigitem('devel', 'warn-empty-changegroup',
374 default=False,
374 default=False,
375 )
375 )
376 coreconfigitem('devel', 'legacy.exchange',
376 coreconfigitem('devel', 'legacy.exchange',
377 default=list,
377 default=list,
378 )
378 )
379 coreconfigitem('devel', 'servercafile',
379 coreconfigitem('devel', 'servercafile',
380 default='',
380 default='',
381 )
381 )
382 coreconfigitem('devel', 'serverexactprotocol',
382 coreconfigitem('devel', 'serverexactprotocol',
383 default='',
383 default='',
384 )
384 )
385 coreconfigitem('devel', 'serverrequirecert',
385 coreconfigitem('devel', 'serverrequirecert',
386 default=False,
386 default=False,
387 )
387 )
388 coreconfigitem('devel', 'strip-obsmarkers',
388 coreconfigitem('devel', 'strip-obsmarkers',
389 default=True,
389 default=True,
390 )
390 )
391 coreconfigitem('devel', 'warn-config',
391 coreconfigitem('devel', 'warn-config',
392 default=None,
392 default=None,
393 )
393 )
394 coreconfigitem('devel', 'warn-config-default',
394 coreconfigitem('devel', 'warn-config-default',
395 default=None,
395 default=None,
396 )
396 )
397 coreconfigitem('devel', 'user.obsmarker',
397 coreconfigitem('devel', 'user.obsmarker',
398 default=None,
398 default=None,
399 )
399 )
400 coreconfigitem('devel', 'warn-config-unknown',
400 coreconfigitem('devel', 'warn-config-unknown',
401 default=None,
401 default=None,
402 )
402 )
403 coreconfigitem('devel', 'debug.copies',
403 coreconfigitem('devel', 'debug.copies',
404 default=False,
404 default=False,
405 )
405 )
406 coreconfigitem('devel', 'debug.extensions',
406 coreconfigitem('devel', 'debug.extensions',
407 default=False,
407 default=False,
408 )
408 )
409 coreconfigitem('devel', 'debug.peer-request',
409 coreconfigitem('devel', 'debug.peer-request',
410 default=False,
410 default=False,
411 )
411 )
412 _registerdiffopts(section='diff')
412 _registerdiffopts(section='diff')
413 coreconfigitem('email', 'bcc',
413 coreconfigitem('email', 'bcc',
414 default=None,
414 default=None,
415 )
415 )
416 coreconfigitem('email', 'cc',
416 coreconfigitem('email', 'cc',
417 default=None,
417 default=None,
418 )
418 )
419 coreconfigitem('email', 'charsets',
419 coreconfigitem('email', 'charsets',
420 default=list,
420 default=list,
421 )
421 )
422 coreconfigitem('email', 'from',
422 coreconfigitem('email', 'from',
423 default=None,
423 default=None,
424 )
424 )
425 coreconfigitem('email', 'method',
425 coreconfigitem('email', 'method',
426 default='smtp',
426 default='smtp',
427 )
427 )
428 coreconfigitem('email', 'reply-to',
428 coreconfigitem('email', 'reply-to',
429 default=None,
429 default=None,
430 )
430 )
431 coreconfigitem('email', 'to',
431 coreconfigitem('email', 'to',
432 default=None,
432 default=None,
433 )
433 )
434 coreconfigitem('experimental', 'archivemetatemplate',
434 coreconfigitem('experimental', 'archivemetatemplate',
435 default=dynamicdefault,
435 default=dynamicdefault,
436 )
436 )
437 coreconfigitem('experimental', 'auto-publish',
437 coreconfigitem('experimental', 'auto-publish',
438 default='publish',
438 default='publish',
439 )
439 )
440 coreconfigitem('experimental', 'bundle-phases',
440 coreconfigitem('experimental', 'bundle-phases',
441 default=False,
441 default=False,
442 )
442 )
443 coreconfigitem('experimental', 'bundle2-advertise',
443 coreconfigitem('experimental', 'bundle2-advertise',
444 default=True,
444 default=True,
445 )
445 )
446 coreconfigitem('experimental', 'bundle2-output-capture',
446 coreconfigitem('experimental', 'bundle2-output-capture',
447 default=False,
447 default=False,
448 )
448 )
449 coreconfigitem('experimental', 'bundle2.pushback',
449 coreconfigitem('experimental', 'bundle2.pushback',
450 default=False,
450 default=False,
451 )
451 )
452 coreconfigitem('experimental', 'bundle2lazylocking',
452 coreconfigitem('experimental', 'bundle2lazylocking',
453 default=False,
453 default=False,
454 )
454 )
455 coreconfigitem('experimental', 'bundlecomplevel',
455 coreconfigitem('experimental', 'bundlecomplevel',
456 default=None,
456 default=None,
457 )
457 )
458 coreconfigitem('experimental', 'bundlecomplevel.bzip2',
458 coreconfigitem('experimental', 'bundlecomplevel.bzip2',
459 default=None,
459 default=None,
460 )
460 )
461 coreconfigitem('experimental', 'bundlecomplevel.gzip',
461 coreconfigitem('experimental', 'bundlecomplevel.gzip',
462 default=None,
462 default=None,
463 )
463 )
464 coreconfigitem('experimental', 'bundlecomplevel.none',
464 coreconfigitem('experimental', 'bundlecomplevel.none',
465 default=None,
465 default=None,
466 )
466 )
467 coreconfigitem('experimental', 'bundlecomplevel.zstd',
467 coreconfigitem('experimental', 'bundlecomplevel.zstd',
468 default=None,
468 default=None,
469 )
469 )
470 coreconfigitem('experimental', 'changegroup3',
470 coreconfigitem('experimental', 'changegroup3',
471 default=False,
471 default=False,
472 )
472 )
473 coreconfigitem('experimental', 'cleanup-as-archived',
473 coreconfigitem('experimental', 'cleanup-as-archived',
474 default=False,
474 default=False,
475 )
475 )
476 coreconfigitem('experimental', 'clientcompressionengines',
476 coreconfigitem('experimental', 'clientcompressionengines',
477 default=list,
477 default=list,
478 )
478 )
479 coreconfigitem('experimental', 'copytrace',
479 coreconfigitem('experimental', 'copytrace',
480 default='on',
480 default='on',
481 )
481 )
482 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
482 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
483 default=100,
483 default=100,
484 )
484 )
485 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
485 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
486 default=100,
486 default=100,
487 )
487 )
488 coreconfigitem('experimental', 'copies.read-from',
488 coreconfigitem('experimental', 'copies.read-from',
489 default="filelog-only",
489 default="filelog-only",
490 )
490 )
491 coreconfigitem('experimental', 'copies.write-to',
492 default='filelog-only',
493 )
491 coreconfigitem('experimental', 'crecordtest',
494 coreconfigitem('experimental', 'crecordtest',
492 default=None,
495 default=None,
493 )
496 )
494 coreconfigitem('experimental', 'directaccess',
497 coreconfigitem('experimental', 'directaccess',
495 default=False,
498 default=False,
496 )
499 )
497 coreconfigitem('experimental', 'directaccess.revnums',
500 coreconfigitem('experimental', 'directaccess.revnums',
498 default=False,
501 default=False,
499 )
502 )
500 coreconfigitem('experimental', 'editortmpinhg',
503 coreconfigitem('experimental', 'editortmpinhg',
501 default=False,
504 default=False,
502 )
505 )
503 coreconfigitem('experimental', 'evolution',
506 coreconfigitem('experimental', 'evolution',
504 default=list,
507 default=list,
505 )
508 )
506 coreconfigitem('experimental', 'evolution.allowdivergence',
509 coreconfigitem('experimental', 'evolution.allowdivergence',
507 default=False,
510 default=False,
508 alias=[('experimental', 'allowdivergence')]
511 alias=[('experimental', 'allowdivergence')]
509 )
512 )
510 coreconfigitem('experimental', 'evolution.allowunstable',
513 coreconfigitem('experimental', 'evolution.allowunstable',
511 default=None,
514 default=None,
512 )
515 )
513 coreconfigitem('experimental', 'evolution.createmarkers',
516 coreconfigitem('experimental', 'evolution.createmarkers',
514 default=None,
517 default=None,
515 )
518 )
516 coreconfigitem('experimental', 'evolution.effect-flags',
519 coreconfigitem('experimental', 'evolution.effect-flags',
517 default=True,
520 default=True,
518 alias=[('experimental', 'effect-flags')]
521 alias=[('experimental', 'effect-flags')]
519 )
522 )
520 coreconfigitem('experimental', 'evolution.exchange',
523 coreconfigitem('experimental', 'evolution.exchange',
521 default=None,
524 default=None,
522 )
525 )
523 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
526 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
524 default=False,
527 default=False,
525 )
528 )
526 coreconfigitem('experimental', 'evolution.report-instabilities',
529 coreconfigitem('experimental', 'evolution.report-instabilities',
527 default=True,
530 default=True,
528 )
531 )
529 coreconfigitem('experimental', 'evolution.track-operation',
532 coreconfigitem('experimental', 'evolution.track-operation',
530 default=True,
533 default=True,
531 )
534 )
532 coreconfigitem('experimental', 'maxdeltachainspan',
535 coreconfigitem('experimental', 'maxdeltachainspan',
533 default=-1,
536 default=-1,
534 )
537 )
535 coreconfigitem('experimental', 'mergetempdirprefix',
538 coreconfigitem('experimental', 'mergetempdirprefix',
536 default=None,
539 default=None,
537 )
540 )
538 coreconfigitem('experimental', 'mmapindexthreshold',
541 coreconfigitem('experimental', 'mmapindexthreshold',
539 default=None,
542 default=None,
540 )
543 )
541 coreconfigitem('experimental', 'narrow',
544 coreconfigitem('experimental', 'narrow',
542 default=False,
545 default=False,
543 )
546 )
544 coreconfigitem('experimental', 'nonnormalparanoidcheck',
547 coreconfigitem('experimental', 'nonnormalparanoidcheck',
545 default=False,
548 default=False,
546 )
549 )
547 coreconfigitem('experimental', 'exportableenviron',
550 coreconfigitem('experimental', 'exportableenviron',
548 default=list,
551 default=list,
549 )
552 )
550 coreconfigitem('experimental', 'extendedheader.index',
553 coreconfigitem('experimental', 'extendedheader.index',
551 default=None,
554 default=None,
552 )
555 )
553 coreconfigitem('experimental', 'extendedheader.similarity',
556 coreconfigitem('experimental', 'extendedheader.similarity',
554 default=False,
557 default=False,
555 )
558 )
556 coreconfigitem('experimental', 'graphshorten',
559 coreconfigitem('experimental', 'graphshorten',
557 default=False,
560 default=False,
558 )
561 )
559 coreconfigitem('experimental', 'graphstyle.parent',
562 coreconfigitem('experimental', 'graphstyle.parent',
560 default=dynamicdefault,
563 default=dynamicdefault,
561 )
564 )
562 coreconfigitem('experimental', 'graphstyle.missing',
565 coreconfigitem('experimental', 'graphstyle.missing',
563 default=dynamicdefault,
566 default=dynamicdefault,
564 )
567 )
565 coreconfigitem('experimental', 'graphstyle.grandparent',
568 coreconfigitem('experimental', 'graphstyle.grandparent',
566 default=dynamicdefault,
569 default=dynamicdefault,
567 )
570 )
568 coreconfigitem('experimental', 'hook-track-tags',
571 coreconfigitem('experimental', 'hook-track-tags',
569 default=False,
572 default=False,
570 )
573 )
571 coreconfigitem('experimental', 'httppeer.advertise-v2',
574 coreconfigitem('experimental', 'httppeer.advertise-v2',
572 default=False,
575 default=False,
573 )
576 )
574 coreconfigitem('experimental', 'httppeer.v2-encoder-order',
577 coreconfigitem('experimental', 'httppeer.v2-encoder-order',
575 default=None,
578 default=None,
576 )
579 )
577 coreconfigitem('experimental', 'httppostargs',
580 coreconfigitem('experimental', 'httppostargs',
578 default=False,
581 default=False,
579 )
582 )
580 coreconfigitem('experimental', 'mergedriver',
583 coreconfigitem('experimental', 'mergedriver',
581 default=None,
584 default=None,
582 )
585 )
583 coreconfigitem('experimental', 'nointerrupt', default=False)
586 coreconfigitem('experimental', 'nointerrupt', default=False)
584 coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
587 coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
585
588
586 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
589 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
587 default=False,
590 default=False,
588 )
591 )
589 coreconfigitem('experimental', 'remotenames',
592 coreconfigitem('experimental', 'remotenames',
590 default=False,
593 default=False,
591 )
594 )
592 coreconfigitem('experimental', 'removeemptydirs',
595 coreconfigitem('experimental', 'removeemptydirs',
593 default=True,
596 default=True,
594 )
597 )
595 coreconfigitem('experimental', 'revert.interactive.select-to-keep',
598 coreconfigitem('experimental', 'revert.interactive.select-to-keep',
596 default=False,
599 default=False,
597 )
600 )
598 coreconfigitem('experimental', 'revisions.prefixhexnode',
601 coreconfigitem('experimental', 'revisions.prefixhexnode',
599 default=False,
602 default=False,
600 )
603 )
601 coreconfigitem('experimental', 'revlogv2',
604 coreconfigitem('experimental', 'revlogv2',
602 default=None,
605 default=None,
603 )
606 )
604 coreconfigitem('experimental', 'revisions.disambiguatewithin',
607 coreconfigitem('experimental', 'revisions.disambiguatewithin',
605 default=None,
608 default=None,
606 )
609 )
607 coreconfigitem('experimental', 'server.filesdata.recommended-batch-size',
610 coreconfigitem('experimental', 'server.filesdata.recommended-batch-size',
608 default=50000,
611 default=50000,
609 )
612 )
610 coreconfigitem('experimental', 'server.manifestdata.recommended-batch-size',
613 coreconfigitem('experimental', 'server.manifestdata.recommended-batch-size',
611 default=100000,
614 default=100000,
612 )
615 )
613 coreconfigitem('experimental', 'server.stream-narrow-clones',
616 coreconfigitem('experimental', 'server.stream-narrow-clones',
614 default=False,
617 default=False,
615 )
618 )
616 coreconfigitem('experimental', 'single-head-per-branch',
619 coreconfigitem('experimental', 'single-head-per-branch',
617 default=False,
620 default=False,
618 )
621 )
619 coreconfigitem('experimental', 'sshserver.support-v2',
622 coreconfigitem('experimental', 'sshserver.support-v2',
620 default=False,
623 default=False,
621 )
624 )
622 coreconfigitem('experimental', 'sparse-read',
625 coreconfigitem('experimental', 'sparse-read',
623 default=False,
626 default=False,
624 )
627 )
625 coreconfigitem('experimental', 'sparse-read.density-threshold',
628 coreconfigitem('experimental', 'sparse-read.density-threshold',
626 default=0.50,
629 default=0.50,
627 )
630 )
628 coreconfigitem('experimental', 'sparse-read.min-gap-size',
631 coreconfigitem('experimental', 'sparse-read.min-gap-size',
629 default='65K',
632 default='65K',
630 )
633 )
631 coreconfigitem('experimental', 'treemanifest',
634 coreconfigitem('experimental', 'treemanifest',
632 default=False,
635 default=False,
633 )
636 )
634 coreconfigitem('experimental', 'update.atomic-file',
637 coreconfigitem('experimental', 'update.atomic-file',
635 default=False,
638 default=False,
636 )
639 )
637 coreconfigitem('experimental', 'sshpeer.advertise-v2',
640 coreconfigitem('experimental', 'sshpeer.advertise-v2',
638 default=False,
641 default=False,
639 )
642 )
640 coreconfigitem('experimental', 'web.apiserver',
643 coreconfigitem('experimental', 'web.apiserver',
641 default=False,
644 default=False,
642 )
645 )
643 coreconfigitem('experimental', 'web.api.http-v2',
646 coreconfigitem('experimental', 'web.api.http-v2',
644 default=False,
647 default=False,
645 )
648 )
646 coreconfigitem('experimental', 'web.api.debugreflect',
649 coreconfigitem('experimental', 'web.api.debugreflect',
647 default=False,
650 default=False,
648 )
651 )
649 coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
652 coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
650 default=False,
653 default=False,
651 )
654 )
652 coreconfigitem('experimental', 'xdiff',
655 coreconfigitem('experimental', 'xdiff',
653 default=False,
656 default=False,
654 )
657 )
655 coreconfigitem('extensions', '.*',
658 coreconfigitem('extensions', '.*',
656 default=None,
659 default=None,
657 generic=True,
660 generic=True,
658 )
661 )
659 coreconfigitem('extdata', '.*',
662 coreconfigitem('extdata', '.*',
660 default=None,
663 default=None,
661 generic=True,
664 generic=True,
662 )
665 )
663 coreconfigitem('format', 'chunkcachesize',
666 coreconfigitem('format', 'chunkcachesize',
664 default=None,
667 default=None,
665 )
668 )
666 coreconfigitem('format', 'dotencode',
669 coreconfigitem('format', 'dotencode',
667 default=True,
670 default=True,
668 )
671 )
669 coreconfigitem('format', 'generaldelta',
672 coreconfigitem('format', 'generaldelta',
670 default=False,
673 default=False,
671 )
674 )
672 coreconfigitem('format', 'manifestcachesize',
675 coreconfigitem('format', 'manifestcachesize',
673 default=None,
676 default=None,
674 )
677 )
675 coreconfigitem('format', 'maxchainlen',
678 coreconfigitem('format', 'maxchainlen',
676 default=dynamicdefault,
679 default=dynamicdefault,
677 )
680 )
678 coreconfigitem('format', 'obsstore-version',
681 coreconfigitem('format', 'obsstore-version',
679 default=None,
682 default=None,
680 )
683 )
681 coreconfigitem('format', 'sparse-revlog',
684 coreconfigitem('format', 'sparse-revlog',
682 default=True,
685 default=True,
683 )
686 )
684 coreconfigitem('format', 'revlog-compression',
687 coreconfigitem('format', 'revlog-compression',
685 default='zlib',
688 default='zlib',
686 alias=[('experimental', 'format.compression')]
689 alias=[('experimental', 'format.compression')]
687 )
690 )
688 coreconfigitem('format', 'usefncache',
691 coreconfigitem('format', 'usefncache',
689 default=True,
692 default=True,
690 )
693 )
691 coreconfigitem('format', 'usegeneraldelta',
694 coreconfigitem('format', 'usegeneraldelta',
692 default=True,
695 default=True,
693 )
696 )
694 coreconfigitem('format', 'usestore',
697 coreconfigitem('format', 'usestore',
695 default=True,
698 default=True,
696 )
699 )
697 coreconfigitem('format', 'internal-phase',
700 coreconfigitem('format', 'internal-phase',
698 default=False,
701 default=False,
699 )
702 )
700 coreconfigitem('fsmonitor', 'warn_when_unused',
703 coreconfigitem('fsmonitor', 'warn_when_unused',
701 default=True,
704 default=True,
702 )
705 )
703 coreconfigitem('fsmonitor', 'warn_update_file_count',
706 coreconfigitem('fsmonitor', 'warn_update_file_count',
704 default=50000,
707 default=50000,
705 )
708 )
706 coreconfigitem('help', br'hidden-command\..*',
709 coreconfigitem('help', br'hidden-command\..*',
707 default=False,
710 default=False,
708 generic=True,
711 generic=True,
709 )
712 )
710 coreconfigitem('help', br'hidden-topic\..*',
713 coreconfigitem('help', br'hidden-topic\..*',
711 default=False,
714 default=False,
712 generic=True,
715 generic=True,
713 )
716 )
714 coreconfigitem('hooks', '.*',
717 coreconfigitem('hooks', '.*',
715 default=dynamicdefault,
718 default=dynamicdefault,
716 generic=True,
719 generic=True,
717 )
720 )
718 coreconfigitem('hgweb-paths', '.*',
721 coreconfigitem('hgweb-paths', '.*',
719 default=list,
722 default=list,
720 generic=True,
723 generic=True,
721 )
724 )
722 coreconfigitem('hostfingerprints', '.*',
725 coreconfigitem('hostfingerprints', '.*',
723 default=list,
726 default=list,
724 generic=True,
727 generic=True,
725 )
728 )
726 coreconfigitem('hostsecurity', 'ciphers',
729 coreconfigitem('hostsecurity', 'ciphers',
727 default=None,
730 default=None,
728 )
731 )
729 coreconfigitem('hostsecurity', 'disabletls10warning',
732 coreconfigitem('hostsecurity', 'disabletls10warning',
730 default=False,
733 default=False,
731 )
734 )
732 coreconfigitem('hostsecurity', 'minimumprotocol',
735 coreconfigitem('hostsecurity', 'minimumprotocol',
733 default=dynamicdefault,
736 default=dynamicdefault,
734 )
737 )
735 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
738 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
736 default=dynamicdefault,
739 default=dynamicdefault,
737 generic=True,
740 generic=True,
738 )
741 )
739 coreconfigitem('hostsecurity', '.*:ciphers$',
742 coreconfigitem('hostsecurity', '.*:ciphers$',
740 default=dynamicdefault,
743 default=dynamicdefault,
741 generic=True,
744 generic=True,
742 )
745 )
743 coreconfigitem('hostsecurity', '.*:fingerprints$',
746 coreconfigitem('hostsecurity', '.*:fingerprints$',
744 default=list,
747 default=list,
745 generic=True,
748 generic=True,
746 )
749 )
747 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
750 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
748 default=None,
751 default=None,
749 generic=True,
752 generic=True,
750 )
753 )
751
754
752 coreconfigitem('http_proxy', 'always',
755 coreconfigitem('http_proxy', 'always',
753 default=False,
756 default=False,
754 )
757 )
755 coreconfigitem('http_proxy', 'host',
758 coreconfigitem('http_proxy', 'host',
756 default=None,
759 default=None,
757 )
760 )
758 coreconfigitem('http_proxy', 'no',
761 coreconfigitem('http_proxy', 'no',
759 default=list,
762 default=list,
760 )
763 )
761 coreconfigitem('http_proxy', 'passwd',
764 coreconfigitem('http_proxy', 'passwd',
762 default=None,
765 default=None,
763 )
766 )
764 coreconfigitem('http_proxy', 'user',
767 coreconfigitem('http_proxy', 'user',
765 default=None,
768 default=None,
766 )
769 )
767
770
768 coreconfigitem('http', 'timeout',
771 coreconfigitem('http', 'timeout',
769 default=None,
772 default=None,
770 )
773 )
771
774
772 coreconfigitem('logtoprocess', 'commandexception',
775 coreconfigitem('logtoprocess', 'commandexception',
773 default=None,
776 default=None,
774 )
777 )
775 coreconfigitem('logtoprocess', 'commandfinish',
778 coreconfigitem('logtoprocess', 'commandfinish',
776 default=None,
779 default=None,
777 )
780 )
778 coreconfigitem('logtoprocess', 'command',
781 coreconfigitem('logtoprocess', 'command',
779 default=None,
782 default=None,
780 )
783 )
781 coreconfigitem('logtoprocess', 'develwarn',
784 coreconfigitem('logtoprocess', 'develwarn',
782 default=None,
785 default=None,
783 )
786 )
784 coreconfigitem('logtoprocess', 'uiblocked',
787 coreconfigitem('logtoprocess', 'uiblocked',
785 default=None,
788 default=None,
786 )
789 )
787 coreconfigitem('merge', 'checkunknown',
790 coreconfigitem('merge', 'checkunknown',
788 default='abort',
791 default='abort',
789 )
792 )
790 coreconfigitem('merge', 'checkignored',
793 coreconfigitem('merge', 'checkignored',
791 default='abort',
794 default='abort',
792 )
795 )
793 coreconfigitem('experimental', 'merge.checkpathconflicts',
796 coreconfigitem('experimental', 'merge.checkpathconflicts',
794 default=False,
797 default=False,
795 )
798 )
796 coreconfigitem('merge', 'followcopies',
799 coreconfigitem('merge', 'followcopies',
797 default=True,
800 default=True,
798 )
801 )
799 coreconfigitem('merge', 'on-failure',
802 coreconfigitem('merge', 'on-failure',
800 default='continue',
803 default='continue',
801 )
804 )
802 coreconfigitem('merge', 'preferancestor',
805 coreconfigitem('merge', 'preferancestor',
803 default=lambda: ['*'],
806 default=lambda: ['*'],
804 )
807 )
805 coreconfigitem('merge', 'strict-capability-check',
808 coreconfigitem('merge', 'strict-capability-check',
806 default=False,
809 default=False,
807 )
810 )
808 coreconfigitem('merge-tools', '.*',
811 coreconfigitem('merge-tools', '.*',
809 default=None,
812 default=None,
810 generic=True,
813 generic=True,
811 )
814 )
812 coreconfigitem('merge-tools', br'.*\.args$',
815 coreconfigitem('merge-tools', br'.*\.args$',
813 default="$local $base $other",
816 default="$local $base $other",
814 generic=True,
817 generic=True,
815 priority=-1,
818 priority=-1,
816 )
819 )
817 coreconfigitem('merge-tools', br'.*\.binary$',
820 coreconfigitem('merge-tools', br'.*\.binary$',
818 default=False,
821 default=False,
819 generic=True,
822 generic=True,
820 priority=-1,
823 priority=-1,
821 )
824 )
822 coreconfigitem('merge-tools', br'.*\.check$',
825 coreconfigitem('merge-tools', br'.*\.check$',
823 default=list,
826 default=list,
824 generic=True,
827 generic=True,
825 priority=-1,
828 priority=-1,
826 )
829 )
827 coreconfigitem('merge-tools', br'.*\.checkchanged$',
830 coreconfigitem('merge-tools', br'.*\.checkchanged$',
828 default=False,
831 default=False,
829 generic=True,
832 generic=True,
830 priority=-1,
833 priority=-1,
831 )
834 )
832 coreconfigitem('merge-tools', br'.*\.executable$',
835 coreconfigitem('merge-tools', br'.*\.executable$',
833 default=dynamicdefault,
836 default=dynamicdefault,
834 generic=True,
837 generic=True,
835 priority=-1,
838 priority=-1,
836 )
839 )
837 coreconfigitem('merge-tools', br'.*\.fixeol$',
840 coreconfigitem('merge-tools', br'.*\.fixeol$',
838 default=False,
841 default=False,
839 generic=True,
842 generic=True,
840 priority=-1,
843 priority=-1,
841 )
844 )
842 coreconfigitem('merge-tools', br'.*\.gui$',
845 coreconfigitem('merge-tools', br'.*\.gui$',
843 default=False,
846 default=False,
844 generic=True,
847 generic=True,
845 priority=-1,
848 priority=-1,
846 )
849 )
847 coreconfigitem('merge-tools', br'.*\.mergemarkers$',
850 coreconfigitem('merge-tools', br'.*\.mergemarkers$',
848 default='basic',
851 default='basic',
849 generic=True,
852 generic=True,
850 priority=-1,
853 priority=-1,
851 )
854 )
852 coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
855 coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
853 default=dynamicdefault, # take from ui.mergemarkertemplate
856 default=dynamicdefault, # take from ui.mergemarkertemplate
854 generic=True,
857 generic=True,
855 priority=-1,
858 priority=-1,
856 )
859 )
857 coreconfigitem('merge-tools', br'.*\.priority$',
860 coreconfigitem('merge-tools', br'.*\.priority$',
858 default=0,
861 default=0,
859 generic=True,
862 generic=True,
860 priority=-1,
863 priority=-1,
861 )
864 )
862 coreconfigitem('merge-tools', br'.*\.premerge$',
865 coreconfigitem('merge-tools', br'.*\.premerge$',
863 default=dynamicdefault,
866 default=dynamicdefault,
864 generic=True,
867 generic=True,
865 priority=-1,
868 priority=-1,
866 )
869 )
867 coreconfigitem('merge-tools', br'.*\.symlink$',
870 coreconfigitem('merge-tools', br'.*\.symlink$',
868 default=False,
871 default=False,
869 generic=True,
872 generic=True,
870 priority=-1,
873 priority=-1,
871 )
874 )
872 coreconfigitem('pager', 'attend-.*',
875 coreconfigitem('pager', 'attend-.*',
873 default=dynamicdefault,
876 default=dynamicdefault,
874 generic=True,
877 generic=True,
875 )
878 )
876 coreconfigitem('pager', 'ignore',
879 coreconfigitem('pager', 'ignore',
877 default=list,
880 default=list,
878 )
881 )
879 coreconfigitem('pager', 'pager',
882 coreconfigitem('pager', 'pager',
880 default=dynamicdefault,
883 default=dynamicdefault,
881 )
884 )
882 coreconfigitem('patch', 'eol',
885 coreconfigitem('patch', 'eol',
883 default='strict',
886 default='strict',
884 )
887 )
885 coreconfigitem('patch', 'fuzz',
888 coreconfigitem('patch', 'fuzz',
886 default=2,
889 default=2,
887 )
890 )
888 coreconfigitem('paths', 'default',
891 coreconfigitem('paths', 'default',
889 default=None,
892 default=None,
890 )
893 )
891 coreconfigitem('paths', 'default-push',
894 coreconfigitem('paths', 'default-push',
892 default=None,
895 default=None,
893 )
896 )
894 coreconfigitem('paths', '.*',
897 coreconfigitem('paths', '.*',
895 default=None,
898 default=None,
896 generic=True,
899 generic=True,
897 )
900 )
898 coreconfigitem('phases', 'checksubrepos',
901 coreconfigitem('phases', 'checksubrepos',
899 default='follow',
902 default='follow',
900 )
903 )
901 coreconfigitem('phases', 'new-commit',
904 coreconfigitem('phases', 'new-commit',
902 default='draft',
905 default='draft',
903 )
906 )
904 coreconfigitem('phases', 'publish',
907 coreconfigitem('phases', 'publish',
905 default=True,
908 default=True,
906 )
909 )
907 coreconfigitem('profiling', 'enabled',
910 coreconfigitem('profiling', 'enabled',
908 default=False,
911 default=False,
909 )
912 )
910 coreconfigitem('profiling', 'format',
913 coreconfigitem('profiling', 'format',
911 default='text',
914 default='text',
912 )
915 )
913 coreconfigitem('profiling', 'freq',
916 coreconfigitem('profiling', 'freq',
914 default=1000,
917 default=1000,
915 )
918 )
916 coreconfigitem('profiling', 'limit',
919 coreconfigitem('profiling', 'limit',
917 default=30,
920 default=30,
918 )
921 )
919 coreconfigitem('profiling', 'nested',
922 coreconfigitem('profiling', 'nested',
920 default=0,
923 default=0,
921 )
924 )
922 coreconfigitem('profiling', 'output',
925 coreconfigitem('profiling', 'output',
923 default=None,
926 default=None,
924 )
927 )
925 coreconfigitem('profiling', 'showmax',
928 coreconfigitem('profiling', 'showmax',
926 default=0.999,
929 default=0.999,
927 )
930 )
928 coreconfigitem('profiling', 'showmin',
931 coreconfigitem('profiling', 'showmin',
929 default=dynamicdefault,
932 default=dynamicdefault,
930 )
933 )
931 coreconfigitem('profiling', 'sort',
934 coreconfigitem('profiling', 'sort',
932 default='inlinetime',
935 default='inlinetime',
933 )
936 )
934 coreconfigitem('profiling', 'statformat',
937 coreconfigitem('profiling', 'statformat',
935 default='hotpath',
938 default='hotpath',
936 )
939 )
937 coreconfigitem('profiling', 'time-track',
940 coreconfigitem('profiling', 'time-track',
938 default=dynamicdefault,
941 default=dynamicdefault,
939 )
942 )
940 coreconfigitem('profiling', 'type',
943 coreconfigitem('profiling', 'type',
941 default='stat',
944 default='stat',
942 )
945 )
943 coreconfigitem('progress', 'assume-tty',
946 coreconfigitem('progress', 'assume-tty',
944 default=False,
947 default=False,
945 )
948 )
946 coreconfigitem('progress', 'changedelay',
949 coreconfigitem('progress', 'changedelay',
947 default=1,
950 default=1,
948 )
951 )
949 coreconfigitem('progress', 'clear-complete',
952 coreconfigitem('progress', 'clear-complete',
950 default=True,
953 default=True,
951 )
954 )
952 coreconfigitem('progress', 'debug',
955 coreconfigitem('progress', 'debug',
953 default=False,
956 default=False,
954 )
957 )
955 coreconfigitem('progress', 'delay',
958 coreconfigitem('progress', 'delay',
956 default=3,
959 default=3,
957 )
960 )
958 coreconfigitem('progress', 'disable',
961 coreconfigitem('progress', 'disable',
959 default=False,
962 default=False,
960 )
963 )
961 coreconfigitem('progress', 'estimateinterval',
964 coreconfigitem('progress', 'estimateinterval',
962 default=60.0,
965 default=60.0,
963 )
966 )
964 coreconfigitem('progress', 'format',
967 coreconfigitem('progress', 'format',
965 default=lambda: ['topic', 'bar', 'number', 'estimate'],
968 default=lambda: ['topic', 'bar', 'number', 'estimate'],
966 )
969 )
967 coreconfigitem('progress', 'refresh',
970 coreconfigitem('progress', 'refresh',
968 default=0.1,
971 default=0.1,
969 )
972 )
970 coreconfigitem('progress', 'width',
973 coreconfigitem('progress', 'width',
971 default=dynamicdefault,
974 default=dynamicdefault,
972 )
975 )
973 coreconfigitem('push', 'pushvars.server',
976 coreconfigitem('push', 'pushvars.server',
974 default=False,
977 default=False,
975 )
978 )
976 coreconfigitem('rewrite', 'backup-bundle',
979 coreconfigitem('rewrite', 'backup-bundle',
977 default=True,
980 default=True,
978 alias=[('ui', 'history-editing-backup')],
981 alias=[('ui', 'history-editing-backup')],
979 )
982 )
980 coreconfigitem('rewrite', 'update-timestamp',
983 coreconfigitem('rewrite', 'update-timestamp',
981 default=False,
984 default=False,
982 )
985 )
983 coreconfigitem('storage', 'new-repo-backend',
986 coreconfigitem('storage', 'new-repo-backend',
984 default='revlogv1',
987 default='revlogv1',
985 )
988 )
986 coreconfigitem('storage', 'revlog.optimize-delta-parent-choice',
989 coreconfigitem('storage', 'revlog.optimize-delta-parent-choice',
987 default=True,
990 default=True,
988 alias=[('format', 'aggressivemergedeltas')],
991 alias=[('format', 'aggressivemergedeltas')],
989 )
992 )
990 coreconfigitem('storage', 'revlog.reuse-external-delta',
993 coreconfigitem('storage', 'revlog.reuse-external-delta',
991 default=True,
994 default=True,
992 )
995 )
993 coreconfigitem('storage', 'revlog.reuse-external-delta-parent',
996 coreconfigitem('storage', 'revlog.reuse-external-delta-parent',
994 default=None,
997 default=None,
995 )
998 )
996 coreconfigitem('storage', 'revlog.zlib.level',
999 coreconfigitem('storage', 'revlog.zlib.level',
997 default=None,
1000 default=None,
998 )
1001 )
999 coreconfigitem('storage', 'revlog.zstd.level',
1002 coreconfigitem('storage', 'revlog.zstd.level',
1000 default=None,
1003 default=None,
1001 )
1004 )
1002 coreconfigitem('server', 'bookmarks-pushkey-compat',
1005 coreconfigitem('server', 'bookmarks-pushkey-compat',
1003 default=True,
1006 default=True,
1004 )
1007 )
1005 coreconfigitem('server', 'bundle1',
1008 coreconfigitem('server', 'bundle1',
1006 default=True,
1009 default=True,
1007 )
1010 )
1008 coreconfigitem('server', 'bundle1gd',
1011 coreconfigitem('server', 'bundle1gd',
1009 default=None,
1012 default=None,
1010 )
1013 )
1011 coreconfigitem('server', 'bundle1.pull',
1014 coreconfigitem('server', 'bundle1.pull',
1012 default=None,
1015 default=None,
1013 )
1016 )
1014 coreconfigitem('server', 'bundle1gd.pull',
1017 coreconfigitem('server', 'bundle1gd.pull',
1015 default=None,
1018 default=None,
1016 )
1019 )
1017 coreconfigitem('server', 'bundle1.push',
1020 coreconfigitem('server', 'bundle1.push',
1018 default=None,
1021 default=None,
1019 )
1022 )
1020 coreconfigitem('server', 'bundle1gd.push',
1023 coreconfigitem('server', 'bundle1gd.push',
1021 default=None,
1024 default=None,
1022 )
1025 )
1023 coreconfigitem('server', 'bundle2.stream',
1026 coreconfigitem('server', 'bundle2.stream',
1024 default=True,
1027 default=True,
1025 alias=[('experimental', 'bundle2.stream')]
1028 alias=[('experimental', 'bundle2.stream')]
1026 )
1029 )
1027 coreconfigitem('server', 'compressionengines',
1030 coreconfigitem('server', 'compressionengines',
1028 default=list,
1031 default=list,
1029 )
1032 )
1030 coreconfigitem('server', 'concurrent-push-mode',
1033 coreconfigitem('server', 'concurrent-push-mode',
1031 default='strict',
1034 default='strict',
1032 )
1035 )
1033 coreconfigitem('server', 'disablefullbundle',
1036 coreconfigitem('server', 'disablefullbundle',
1034 default=False,
1037 default=False,
1035 )
1038 )
1036 coreconfigitem('server', 'maxhttpheaderlen',
1039 coreconfigitem('server', 'maxhttpheaderlen',
1037 default=1024,
1040 default=1024,
1038 )
1041 )
1039 coreconfigitem('server', 'pullbundle',
1042 coreconfigitem('server', 'pullbundle',
1040 default=False,
1043 default=False,
1041 )
1044 )
1042 coreconfigitem('server', 'preferuncompressed',
1045 coreconfigitem('server', 'preferuncompressed',
1043 default=False,
1046 default=False,
1044 )
1047 )
1045 coreconfigitem('server', 'streamunbundle',
1048 coreconfigitem('server', 'streamunbundle',
1046 default=False,
1049 default=False,
1047 )
1050 )
1048 coreconfigitem('server', 'uncompressed',
1051 coreconfigitem('server', 'uncompressed',
1049 default=True,
1052 default=True,
1050 )
1053 )
1051 coreconfigitem('server', 'uncompressedallowsecret',
1054 coreconfigitem('server', 'uncompressedallowsecret',
1052 default=False,
1055 default=False,
1053 )
1056 )
1054 coreconfigitem('server', 'view',
1057 coreconfigitem('server', 'view',
1055 default='served',
1058 default='served',
1056 )
1059 )
1057 coreconfigitem('server', 'validate',
1060 coreconfigitem('server', 'validate',
1058 default=False,
1061 default=False,
1059 )
1062 )
1060 coreconfigitem('server', 'zliblevel',
1063 coreconfigitem('server', 'zliblevel',
1061 default=-1,
1064 default=-1,
1062 )
1065 )
1063 coreconfigitem('server', 'zstdlevel',
1066 coreconfigitem('server', 'zstdlevel',
1064 default=3,
1067 default=3,
1065 )
1068 )
1066 coreconfigitem('share', 'pool',
1069 coreconfigitem('share', 'pool',
1067 default=None,
1070 default=None,
1068 )
1071 )
1069 coreconfigitem('share', 'poolnaming',
1072 coreconfigitem('share', 'poolnaming',
1070 default='identity',
1073 default='identity',
1071 )
1074 )
1072 coreconfigitem('smtp', 'host',
1075 coreconfigitem('smtp', 'host',
1073 default=None,
1076 default=None,
1074 )
1077 )
1075 coreconfigitem('smtp', 'local_hostname',
1078 coreconfigitem('smtp', 'local_hostname',
1076 default=None,
1079 default=None,
1077 )
1080 )
1078 coreconfigitem('smtp', 'password',
1081 coreconfigitem('smtp', 'password',
1079 default=None,
1082 default=None,
1080 )
1083 )
1081 coreconfigitem('smtp', 'port',
1084 coreconfigitem('smtp', 'port',
1082 default=dynamicdefault,
1085 default=dynamicdefault,
1083 )
1086 )
1084 coreconfigitem('smtp', 'tls',
1087 coreconfigitem('smtp', 'tls',
1085 default='none',
1088 default='none',
1086 )
1089 )
1087 coreconfigitem('smtp', 'username',
1090 coreconfigitem('smtp', 'username',
1088 default=None,
1091 default=None,
1089 )
1092 )
1090 coreconfigitem('sparse', 'missingwarning',
1093 coreconfigitem('sparse', 'missingwarning',
1091 default=True,
1094 default=True,
1092 )
1095 )
1093 coreconfigitem('subrepos', 'allowed',
1096 coreconfigitem('subrepos', 'allowed',
1094 default=dynamicdefault, # to make backporting simpler
1097 default=dynamicdefault, # to make backporting simpler
1095 )
1098 )
1096 coreconfigitem('subrepos', 'hg:allowed',
1099 coreconfigitem('subrepos', 'hg:allowed',
1097 default=dynamicdefault,
1100 default=dynamicdefault,
1098 )
1101 )
1099 coreconfigitem('subrepos', 'git:allowed',
1102 coreconfigitem('subrepos', 'git:allowed',
1100 default=dynamicdefault,
1103 default=dynamicdefault,
1101 )
1104 )
1102 coreconfigitem('subrepos', 'svn:allowed',
1105 coreconfigitem('subrepos', 'svn:allowed',
1103 default=dynamicdefault,
1106 default=dynamicdefault,
1104 )
1107 )
1105 coreconfigitem('templates', '.*',
1108 coreconfigitem('templates', '.*',
1106 default=None,
1109 default=None,
1107 generic=True,
1110 generic=True,
1108 )
1111 )
1109 coreconfigitem('templateconfig', '.*',
1112 coreconfigitem('templateconfig', '.*',
1110 default=dynamicdefault,
1113 default=dynamicdefault,
1111 generic=True,
1114 generic=True,
1112 )
1115 )
1113 coreconfigitem('trusted', 'groups',
1116 coreconfigitem('trusted', 'groups',
1114 default=list,
1117 default=list,
1115 )
1118 )
1116 coreconfigitem('trusted', 'users',
1119 coreconfigitem('trusted', 'users',
1117 default=list,
1120 default=list,
1118 )
1121 )
1119 coreconfigitem('ui', '_usedassubrepo',
1122 coreconfigitem('ui', '_usedassubrepo',
1120 default=False,
1123 default=False,
1121 )
1124 )
1122 coreconfigitem('ui', 'allowemptycommit',
1125 coreconfigitem('ui', 'allowemptycommit',
1123 default=False,
1126 default=False,
1124 )
1127 )
1125 coreconfigitem('ui', 'archivemeta',
1128 coreconfigitem('ui', 'archivemeta',
1126 default=True,
1129 default=True,
1127 )
1130 )
1128 coreconfigitem('ui', 'askusername',
1131 coreconfigitem('ui', 'askusername',
1129 default=False,
1132 default=False,
1130 )
1133 )
1131 coreconfigitem('ui', 'clonebundlefallback',
1134 coreconfigitem('ui', 'clonebundlefallback',
1132 default=False,
1135 default=False,
1133 )
1136 )
1134 coreconfigitem('ui', 'clonebundleprefers',
1137 coreconfigitem('ui', 'clonebundleprefers',
1135 default=list,
1138 default=list,
1136 )
1139 )
1137 coreconfigitem('ui', 'clonebundles',
1140 coreconfigitem('ui', 'clonebundles',
1138 default=True,
1141 default=True,
1139 )
1142 )
1140 coreconfigitem('ui', 'color',
1143 coreconfigitem('ui', 'color',
1141 default='auto',
1144 default='auto',
1142 )
1145 )
1143 coreconfigitem('ui', 'commitsubrepos',
1146 coreconfigitem('ui', 'commitsubrepos',
1144 default=False,
1147 default=False,
1145 )
1148 )
1146 coreconfigitem('ui', 'debug',
1149 coreconfigitem('ui', 'debug',
1147 default=False,
1150 default=False,
1148 )
1151 )
1149 coreconfigitem('ui', 'debugger',
1152 coreconfigitem('ui', 'debugger',
1150 default=None,
1153 default=None,
1151 )
1154 )
1152 coreconfigitem('ui', 'editor',
1155 coreconfigitem('ui', 'editor',
1153 default=dynamicdefault,
1156 default=dynamicdefault,
1154 )
1157 )
1155 coreconfigitem('ui', 'fallbackencoding',
1158 coreconfigitem('ui', 'fallbackencoding',
1156 default=None,
1159 default=None,
1157 )
1160 )
1158 coreconfigitem('ui', 'forcecwd',
1161 coreconfigitem('ui', 'forcecwd',
1159 default=None,
1162 default=None,
1160 )
1163 )
1161 coreconfigitem('ui', 'forcemerge',
1164 coreconfigitem('ui', 'forcemerge',
1162 default=None,
1165 default=None,
1163 )
1166 )
1164 coreconfigitem('ui', 'formatdebug',
1167 coreconfigitem('ui', 'formatdebug',
1165 default=False,
1168 default=False,
1166 )
1169 )
1167 coreconfigitem('ui', 'formatjson',
1170 coreconfigitem('ui', 'formatjson',
1168 default=False,
1171 default=False,
1169 )
1172 )
1170 coreconfigitem('ui', 'formatted',
1173 coreconfigitem('ui', 'formatted',
1171 default=None,
1174 default=None,
1172 )
1175 )
1173 coreconfigitem('ui', 'graphnodetemplate',
1176 coreconfigitem('ui', 'graphnodetemplate',
1174 default=None,
1177 default=None,
1175 )
1178 )
1176 coreconfigitem('ui', 'interactive',
1179 coreconfigitem('ui', 'interactive',
1177 default=None,
1180 default=None,
1178 )
1181 )
1179 coreconfigitem('ui', 'interface',
1182 coreconfigitem('ui', 'interface',
1180 default=None,
1183 default=None,
1181 )
1184 )
1182 coreconfigitem('ui', 'interface.chunkselector',
1185 coreconfigitem('ui', 'interface.chunkselector',
1183 default=None,
1186 default=None,
1184 )
1187 )
1185 coreconfigitem('ui', 'large-file-limit',
1188 coreconfigitem('ui', 'large-file-limit',
1186 default=10000000,
1189 default=10000000,
1187 )
1190 )
1188 coreconfigitem('ui', 'logblockedtimes',
1191 coreconfigitem('ui', 'logblockedtimes',
1189 default=False,
1192 default=False,
1190 )
1193 )
1191 coreconfigitem('ui', 'logtemplate',
1194 coreconfigitem('ui', 'logtemplate',
1192 default=None,
1195 default=None,
1193 )
1196 )
1194 coreconfigitem('ui', 'merge',
1197 coreconfigitem('ui', 'merge',
1195 default=None,
1198 default=None,
1196 )
1199 )
1197 coreconfigitem('ui', 'mergemarkers',
1200 coreconfigitem('ui', 'mergemarkers',
1198 default='basic',
1201 default='basic',
1199 )
1202 )
1200 coreconfigitem('ui', 'mergemarkertemplate',
1203 coreconfigitem('ui', 'mergemarkertemplate',
1201 default=('{node|short} '
1204 default=('{node|short} '
1202 '{ifeq(tags, "tip", "", '
1205 '{ifeq(tags, "tip", "", '
1203 'ifeq(tags, "", "", "{tags} "))}'
1206 'ifeq(tags, "", "", "{tags} "))}'
1204 '{if(bookmarks, "{bookmarks} ")}'
1207 '{if(bookmarks, "{bookmarks} ")}'
1205 '{ifeq(branch, "default", "", "{branch} ")}'
1208 '{ifeq(branch, "default", "", "{branch} ")}'
1206 '- {author|user}: {desc|firstline}')
1209 '- {author|user}: {desc|firstline}')
1207 )
1210 )
1208 coreconfigitem('ui', 'message-output',
1211 coreconfigitem('ui', 'message-output',
1209 default='stdio',
1212 default='stdio',
1210 )
1213 )
1211 coreconfigitem('ui', 'nontty',
1214 coreconfigitem('ui', 'nontty',
1212 default=False,
1215 default=False,
1213 )
1216 )
1214 coreconfigitem('ui', 'origbackuppath',
1217 coreconfigitem('ui', 'origbackuppath',
1215 default=None,
1218 default=None,
1216 )
1219 )
1217 coreconfigitem('ui', 'paginate',
1220 coreconfigitem('ui', 'paginate',
1218 default=True,
1221 default=True,
1219 )
1222 )
1220 coreconfigitem('ui', 'patch',
1223 coreconfigitem('ui', 'patch',
1221 default=None,
1224 default=None,
1222 )
1225 )
1223 coreconfigitem('ui', 'pre-merge-tool-output-template',
1226 coreconfigitem('ui', 'pre-merge-tool-output-template',
1224 default=None,
1227 default=None,
1225 )
1228 )
1226 coreconfigitem('ui', 'portablefilenames',
1229 coreconfigitem('ui', 'portablefilenames',
1227 default='warn',
1230 default='warn',
1228 )
1231 )
1229 coreconfigitem('ui', 'promptecho',
1232 coreconfigitem('ui', 'promptecho',
1230 default=False,
1233 default=False,
1231 )
1234 )
1232 coreconfigitem('ui', 'quiet',
1235 coreconfigitem('ui', 'quiet',
1233 default=False,
1236 default=False,
1234 )
1237 )
1235 coreconfigitem('ui', 'quietbookmarkmove',
1238 coreconfigitem('ui', 'quietbookmarkmove',
1236 default=False,
1239 default=False,
1237 )
1240 )
1238 coreconfigitem('ui', 'relative-paths',
1241 coreconfigitem('ui', 'relative-paths',
1239 default='legacy',
1242 default='legacy',
1240 )
1243 )
1241 coreconfigitem('ui', 'remotecmd',
1244 coreconfigitem('ui', 'remotecmd',
1242 default='hg',
1245 default='hg',
1243 )
1246 )
1244 coreconfigitem('ui', 'report_untrusted',
1247 coreconfigitem('ui', 'report_untrusted',
1245 default=True,
1248 default=True,
1246 )
1249 )
1247 coreconfigitem('ui', 'rollback',
1250 coreconfigitem('ui', 'rollback',
1248 default=True,
1251 default=True,
1249 )
1252 )
1250 coreconfigitem('ui', 'signal-safe-lock',
1253 coreconfigitem('ui', 'signal-safe-lock',
1251 default=True,
1254 default=True,
1252 )
1255 )
1253 coreconfigitem('ui', 'slash',
1256 coreconfigitem('ui', 'slash',
1254 default=False,
1257 default=False,
1255 )
1258 )
1256 coreconfigitem('ui', 'ssh',
1259 coreconfigitem('ui', 'ssh',
1257 default='ssh',
1260 default='ssh',
1258 )
1261 )
1259 coreconfigitem('ui', 'ssherrorhint',
1262 coreconfigitem('ui', 'ssherrorhint',
1260 default=None,
1263 default=None,
1261 )
1264 )
1262 coreconfigitem('ui', 'statuscopies',
1265 coreconfigitem('ui', 'statuscopies',
1263 default=False,
1266 default=False,
1264 )
1267 )
1265 coreconfigitem('ui', 'strict',
1268 coreconfigitem('ui', 'strict',
1266 default=False,
1269 default=False,
1267 )
1270 )
1268 coreconfigitem('ui', 'style',
1271 coreconfigitem('ui', 'style',
1269 default='',
1272 default='',
1270 )
1273 )
1271 coreconfigitem('ui', 'supportcontact',
1274 coreconfigitem('ui', 'supportcontact',
1272 default=None,
1275 default=None,
1273 )
1276 )
1274 coreconfigitem('ui', 'textwidth',
1277 coreconfigitem('ui', 'textwidth',
1275 default=78,
1278 default=78,
1276 )
1279 )
1277 coreconfigitem('ui', 'timeout',
1280 coreconfigitem('ui', 'timeout',
1278 default='600',
1281 default='600',
1279 )
1282 )
1280 coreconfigitem('ui', 'timeout.warn',
1283 coreconfigitem('ui', 'timeout.warn',
1281 default=0,
1284 default=0,
1282 )
1285 )
1283 coreconfigitem('ui', 'traceback',
1286 coreconfigitem('ui', 'traceback',
1284 default=False,
1287 default=False,
1285 )
1288 )
1286 coreconfigitem('ui', 'tweakdefaults',
1289 coreconfigitem('ui', 'tweakdefaults',
1287 default=False,
1290 default=False,
1288 )
1291 )
1289 coreconfigitem('ui', 'username',
1292 coreconfigitem('ui', 'username',
1290 alias=[('ui', 'user')]
1293 alias=[('ui', 'user')]
1291 )
1294 )
1292 coreconfigitem('ui', 'verbose',
1295 coreconfigitem('ui', 'verbose',
1293 default=False,
1296 default=False,
1294 )
1297 )
1295 coreconfigitem('verify', 'skipflags',
1298 coreconfigitem('verify', 'skipflags',
1296 default=None,
1299 default=None,
1297 )
1300 )
1298 coreconfigitem('web', 'allowbz2',
1301 coreconfigitem('web', 'allowbz2',
1299 default=False,
1302 default=False,
1300 )
1303 )
1301 coreconfigitem('web', 'allowgz',
1304 coreconfigitem('web', 'allowgz',
1302 default=False,
1305 default=False,
1303 )
1306 )
1304 coreconfigitem('web', 'allow-pull',
1307 coreconfigitem('web', 'allow-pull',
1305 alias=[('web', 'allowpull')],
1308 alias=[('web', 'allowpull')],
1306 default=True,
1309 default=True,
1307 )
1310 )
1308 coreconfigitem('web', 'allow-push',
1311 coreconfigitem('web', 'allow-push',
1309 alias=[('web', 'allow_push')],
1312 alias=[('web', 'allow_push')],
1310 default=list,
1313 default=list,
1311 )
1314 )
1312 coreconfigitem('web', 'allowzip',
1315 coreconfigitem('web', 'allowzip',
1313 default=False,
1316 default=False,
1314 )
1317 )
1315 coreconfigitem('web', 'archivesubrepos',
1318 coreconfigitem('web', 'archivesubrepos',
1316 default=False,
1319 default=False,
1317 )
1320 )
1318 coreconfigitem('web', 'cache',
1321 coreconfigitem('web', 'cache',
1319 default=True,
1322 default=True,
1320 )
1323 )
1321 coreconfigitem('web', 'comparisoncontext',
1324 coreconfigitem('web', 'comparisoncontext',
1322 default=5,
1325 default=5,
1323 )
1326 )
1324 coreconfigitem('web', 'contact',
1327 coreconfigitem('web', 'contact',
1325 default=None,
1328 default=None,
1326 )
1329 )
1327 coreconfigitem('web', 'deny_push',
1330 coreconfigitem('web', 'deny_push',
1328 default=list,
1331 default=list,
1329 )
1332 )
1330 coreconfigitem('web', 'guessmime',
1333 coreconfigitem('web', 'guessmime',
1331 default=False,
1334 default=False,
1332 )
1335 )
1333 coreconfigitem('web', 'hidden',
1336 coreconfigitem('web', 'hidden',
1334 default=False,
1337 default=False,
1335 )
1338 )
1336 coreconfigitem('web', 'labels',
1339 coreconfigitem('web', 'labels',
1337 default=list,
1340 default=list,
1338 )
1341 )
1339 coreconfigitem('web', 'logoimg',
1342 coreconfigitem('web', 'logoimg',
1340 default='hglogo.png',
1343 default='hglogo.png',
1341 )
1344 )
1342 coreconfigitem('web', 'logourl',
1345 coreconfigitem('web', 'logourl',
1343 default='https://mercurial-scm.org/',
1346 default='https://mercurial-scm.org/',
1344 )
1347 )
1345 coreconfigitem('web', 'accesslog',
1348 coreconfigitem('web', 'accesslog',
1346 default='-',
1349 default='-',
1347 )
1350 )
1348 coreconfigitem('web', 'address',
1351 coreconfigitem('web', 'address',
1349 default='',
1352 default='',
1350 )
1353 )
1351 coreconfigitem('web', 'allow-archive',
1354 coreconfigitem('web', 'allow-archive',
1352 alias=[('web', 'allow_archive')],
1355 alias=[('web', 'allow_archive')],
1353 default=list,
1356 default=list,
1354 )
1357 )
1355 coreconfigitem('web', 'allow_read',
1358 coreconfigitem('web', 'allow_read',
1356 default=list,
1359 default=list,
1357 )
1360 )
1358 coreconfigitem('web', 'baseurl',
1361 coreconfigitem('web', 'baseurl',
1359 default=None,
1362 default=None,
1360 )
1363 )
1361 coreconfigitem('web', 'cacerts',
1364 coreconfigitem('web', 'cacerts',
1362 default=None,
1365 default=None,
1363 )
1366 )
1364 coreconfigitem('web', 'certificate',
1367 coreconfigitem('web', 'certificate',
1365 default=None,
1368 default=None,
1366 )
1369 )
1367 coreconfigitem('web', 'collapse',
1370 coreconfigitem('web', 'collapse',
1368 default=False,
1371 default=False,
1369 )
1372 )
1370 coreconfigitem('web', 'csp',
1373 coreconfigitem('web', 'csp',
1371 default=None,
1374 default=None,
1372 )
1375 )
1373 coreconfigitem('web', 'deny_read',
1376 coreconfigitem('web', 'deny_read',
1374 default=list,
1377 default=list,
1375 )
1378 )
1376 coreconfigitem('web', 'descend',
1379 coreconfigitem('web', 'descend',
1377 default=True,
1380 default=True,
1378 )
1381 )
1379 coreconfigitem('web', 'description',
1382 coreconfigitem('web', 'description',
1380 default="",
1383 default="",
1381 )
1384 )
1382 coreconfigitem('web', 'encoding',
1385 coreconfigitem('web', 'encoding',
1383 default=lambda: encoding.encoding,
1386 default=lambda: encoding.encoding,
1384 )
1387 )
1385 coreconfigitem('web', 'errorlog',
1388 coreconfigitem('web', 'errorlog',
1386 default='-',
1389 default='-',
1387 )
1390 )
1388 coreconfigitem('web', 'ipv6',
1391 coreconfigitem('web', 'ipv6',
1389 default=False,
1392 default=False,
1390 )
1393 )
1391 coreconfigitem('web', 'maxchanges',
1394 coreconfigitem('web', 'maxchanges',
1392 default=10,
1395 default=10,
1393 )
1396 )
1394 coreconfigitem('web', 'maxfiles',
1397 coreconfigitem('web', 'maxfiles',
1395 default=10,
1398 default=10,
1396 )
1399 )
1397 coreconfigitem('web', 'maxshortchanges',
1400 coreconfigitem('web', 'maxshortchanges',
1398 default=60,
1401 default=60,
1399 )
1402 )
1400 coreconfigitem('web', 'motd',
1403 coreconfigitem('web', 'motd',
1401 default='',
1404 default='',
1402 )
1405 )
1403 coreconfigitem('web', 'name',
1406 coreconfigitem('web', 'name',
1404 default=dynamicdefault,
1407 default=dynamicdefault,
1405 )
1408 )
1406 coreconfigitem('web', 'port',
1409 coreconfigitem('web', 'port',
1407 default=8000,
1410 default=8000,
1408 )
1411 )
1409 coreconfigitem('web', 'prefix',
1412 coreconfigitem('web', 'prefix',
1410 default='',
1413 default='',
1411 )
1414 )
1412 coreconfigitem('web', 'push_ssl',
1415 coreconfigitem('web', 'push_ssl',
1413 default=True,
1416 default=True,
1414 )
1417 )
1415 coreconfigitem('web', 'refreshinterval',
1418 coreconfigitem('web', 'refreshinterval',
1416 default=20,
1419 default=20,
1417 )
1420 )
1418 coreconfigitem('web', 'server-header',
1421 coreconfigitem('web', 'server-header',
1419 default=None,
1422 default=None,
1420 )
1423 )
1421 coreconfigitem('web', 'static',
1424 coreconfigitem('web', 'static',
1422 default=None,
1425 default=None,
1423 )
1426 )
1424 coreconfigitem('web', 'staticurl',
1427 coreconfigitem('web', 'staticurl',
1425 default=None,
1428 default=None,
1426 )
1429 )
1427 coreconfigitem('web', 'stripes',
1430 coreconfigitem('web', 'stripes',
1428 default=1,
1431 default=1,
1429 )
1432 )
1430 coreconfigitem('web', 'style',
1433 coreconfigitem('web', 'style',
1431 default='paper',
1434 default='paper',
1432 )
1435 )
1433 coreconfigitem('web', 'templates',
1436 coreconfigitem('web', 'templates',
1434 default=None,
1437 default=None,
1435 )
1438 )
1436 coreconfigitem('web', 'view',
1439 coreconfigitem('web', 'view',
1437 default='served',
1440 default='served',
1438 )
1441 )
1439 coreconfigitem('worker', 'backgroundclose',
1442 coreconfigitem('worker', 'backgroundclose',
1440 default=dynamicdefault,
1443 default=dynamicdefault,
1441 )
1444 )
1442 # Windows defaults to a limit of 512 open files. A buffer of 128
1445 # Windows defaults to a limit of 512 open files. A buffer of 128
1443 # should give us enough headway.
1446 # should give us enough headway.
1444 coreconfigitem('worker', 'backgroundclosemaxqueue',
1447 coreconfigitem('worker', 'backgroundclosemaxqueue',
1445 default=384,
1448 default=384,
1446 )
1449 )
1447 coreconfigitem('worker', 'backgroundcloseminfilecount',
1450 coreconfigitem('worker', 'backgroundcloseminfilecount',
1448 default=2048,
1451 default=2048,
1449 )
1452 )
1450 coreconfigitem('worker', 'backgroundclosethreadcount',
1453 coreconfigitem('worker', 'backgroundclosethreadcount',
1451 default=4,
1454 default=4,
1452 )
1455 )
1453 coreconfigitem('worker', 'enabled',
1456 coreconfigitem('worker', 'enabled',
1454 default=True,
1457 default=True,
1455 )
1458 )
1456 coreconfigitem('worker', 'numcpus',
1459 coreconfigitem('worker', 'numcpus',
1457 default=None,
1460 default=None,
1458 )
1461 )
1459
1462
1460 # Rebase related configuration moved to core because other extension are doing
1463 # Rebase related configuration moved to core because other extension are doing
1461 # strange things. For example, shelve import the extensions to reuse some bit
1464 # strange things. For example, shelve import the extensions to reuse some bit
1462 # without formally loading it.
1465 # without formally loading it.
1463 coreconfigitem('commands', 'rebase.requiredest',
1466 coreconfigitem('commands', 'rebase.requiredest',
1464 default=False,
1467 default=False,
1465 )
1468 )
1466 coreconfigitem('experimental', 'rebaseskipobsolete',
1469 coreconfigitem('experimental', 'rebaseskipobsolete',
1467 default=True,
1470 default=True,
1468 )
1471 )
1469 coreconfigitem('rebase', 'singletransaction',
1472 coreconfigitem('rebase', 'singletransaction',
1470 default=False,
1473 default=False,
1471 )
1474 )
1472 coreconfigitem('rebase', 'experimental.inmemory',
1475 coreconfigitem('rebase', 'experimental.inmemory',
1473 default=False,
1476 default=False,
1474 )
1477 )
@@ -1,3126 +1,3136
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import os
12 import os
13 import random
13 import random
14 import sys
14 import sys
15 import time
15 import time
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 bin,
20 bin,
21 hex,
21 hex,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 )
25 )
26 from . import (
26 from . import (
27 bookmarks,
27 bookmarks,
28 branchmap,
28 branchmap,
29 bundle2,
29 bundle2,
30 changegroup,
30 changegroup,
31 changelog,
31 changelog,
32 color,
32 color,
33 context,
33 context,
34 dirstate,
34 dirstate,
35 dirstateguard,
35 dirstateguard,
36 discovery,
36 discovery,
37 encoding,
37 encoding,
38 error,
38 error,
39 exchange,
39 exchange,
40 extensions,
40 extensions,
41 filelog,
41 filelog,
42 hook,
42 hook,
43 lock as lockmod,
43 lock as lockmod,
44 manifest,
44 manifest,
45 match as matchmod,
45 match as matchmod,
46 merge as mergemod,
46 merge as mergemod,
47 mergeutil,
47 mergeutil,
48 namespaces,
48 namespaces,
49 narrowspec,
49 narrowspec,
50 obsolete,
50 obsolete,
51 pathutil,
51 pathutil,
52 phases,
52 phases,
53 pushkey,
53 pushkey,
54 pycompat,
54 pycompat,
55 repository,
55 repository,
56 repoview,
56 repoview,
57 revset,
57 revset,
58 revsetlang,
58 revsetlang,
59 scmutil,
59 scmutil,
60 sparse,
60 sparse,
61 store as storemod,
61 store as storemod,
62 subrepoutil,
62 subrepoutil,
63 tags as tagsmod,
63 tags as tagsmod,
64 transaction,
64 transaction,
65 txnutil,
65 txnutil,
66 util,
66 util,
67 vfs as vfsmod,
67 vfs as vfsmod,
68 )
68 )
69 from .utils import (
69 from .utils import (
70 interfaceutil,
70 interfaceutil,
71 procutil,
71 procutil,
72 stringutil,
72 stringutil,
73 )
73 )
74
74
75 from .revlogutils import (
75 from .revlogutils import (
76 constants as revlogconst,
76 constants as revlogconst,
77 )
77 )
78
78
79 release = lockmod.release
79 release = lockmod.release
80 urlerr = util.urlerr
80 urlerr = util.urlerr
81 urlreq = util.urlreq
81 urlreq = util.urlreq
82
82
83 # set of (path, vfs-location) tuples. vfs-location is:
83 # set of (path, vfs-location) tuples. vfs-location is:
84 # - 'plain for vfs relative paths
84 # - 'plain for vfs relative paths
85 # - '' for svfs relative paths
85 # - '' for svfs relative paths
86 _cachedfiles = set()
86 _cachedfiles = set()
87
87
88 class _basefilecache(scmutil.filecache):
88 class _basefilecache(scmutil.filecache):
89 """All filecache usage on repo are done for logic that should be unfiltered
89 """All filecache usage on repo are done for logic that should be unfiltered
90 """
90 """
91 def __get__(self, repo, type=None):
91 def __get__(self, repo, type=None):
92 if repo is None:
92 if repo is None:
93 return self
93 return self
94 # proxy to unfiltered __dict__ since filtered repo has no entry
94 # proxy to unfiltered __dict__ since filtered repo has no entry
95 unfi = repo.unfiltered()
95 unfi = repo.unfiltered()
96 try:
96 try:
97 return unfi.__dict__[self.sname]
97 return unfi.__dict__[self.sname]
98 except KeyError:
98 except KeyError:
99 pass
99 pass
100 return super(_basefilecache, self).__get__(unfi, type)
100 return super(_basefilecache, self).__get__(unfi, type)
101
101
102 def set(self, repo, value):
102 def set(self, repo, value):
103 return super(_basefilecache, self).set(repo.unfiltered(), value)
103 return super(_basefilecache, self).set(repo.unfiltered(), value)
104
104
105 class repofilecache(_basefilecache):
105 class repofilecache(_basefilecache):
106 """filecache for files in .hg but outside of .hg/store"""
106 """filecache for files in .hg but outside of .hg/store"""
107 def __init__(self, *paths):
107 def __init__(self, *paths):
108 super(repofilecache, self).__init__(*paths)
108 super(repofilecache, self).__init__(*paths)
109 for path in paths:
109 for path in paths:
110 _cachedfiles.add((path, 'plain'))
110 _cachedfiles.add((path, 'plain'))
111
111
112 def join(self, obj, fname):
112 def join(self, obj, fname):
113 return obj.vfs.join(fname)
113 return obj.vfs.join(fname)
114
114
115 class storecache(_basefilecache):
115 class storecache(_basefilecache):
116 """filecache for files in the store"""
116 """filecache for files in the store"""
117 def __init__(self, *paths):
117 def __init__(self, *paths):
118 super(storecache, self).__init__(*paths)
118 super(storecache, self).__init__(*paths)
119 for path in paths:
119 for path in paths:
120 _cachedfiles.add((path, ''))
120 _cachedfiles.add((path, ''))
121
121
122 def join(self, obj, fname):
122 def join(self, obj, fname):
123 return obj.sjoin(fname)
123 return obj.sjoin(fname)
124
124
125 def isfilecached(repo, name):
125 def isfilecached(repo, name):
126 """check if a repo has already cached "name" filecache-ed property
126 """check if a repo has already cached "name" filecache-ed property
127
127
128 This returns (cachedobj-or-None, iscached) tuple.
128 This returns (cachedobj-or-None, iscached) tuple.
129 """
129 """
130 cacheentry = repo.unfiltered()._filecache.get(name, None)
130 cacheentry = repo.unfiltered()._filecache.get(name, None)
131 if not cacheentry:
131 if not cacheentry:
132 return None, False
132 return None, False
133 return cacheentry.obj, True
133 return cacheentry.obj, True
134
134
135 class unfilteredpropertycache(util.propertycache):
135 class unfilteredpropertycache(util.propertycache):
136 """propertycache that apply to unfiltered repo only"""
136 """propertycache that apply to unfiltered repo only"""
137
137
138 def __get__(self, repo, type=None):
138 def __get__(self, repo, type=None):
139 unfi = repo.unfiltered()
139 unfi = repo.unfiltered()
140 if unfi is repo:
140 if unfi is repo:
141 return super(unfilteredpropertycache, self).__get__(unfi)
141 return super(unfilteredpropertycache, self).__get__(unfi)
142 return getattr(unfi, self.name)
142 return getattr(unfi, self.name)
143
143
144 class filteredpropertycache(util.propertycache):
144 class filteredpropertycache(util.propertycache):
145 """propertycache that must take filtering in account"""
145 """propertycache that must take filtering in account"""
146
146
147 def cachevalue(self, obj, value):
147 def cachevalue(self, obj, value):
148 object.__setattr__(obj, self.name, value)
148 object.__setattr__(obj, self.name, value)
149
149
150
150
151 def hasunfilteredcache(repo, name):
151 def hasunfilteredcache(repo, name):
152 """check if a repo has an unfilteredpropertycache value for <name>"""
152 """check if a repo has an unfilteredpropertycache value for <name>"""
153 return name in vars(repo.unfiltered())
153 return name in vars(repo.unfiltered())
154
154
155 def unfilteredmethod(orig):
155 def unfilteredmethod(orig):
156 """decorate method that always need to be run on unfiltered version"""
156 """decorate method that always need to be run on unfiltered version"""
157 def wrapper(repo, *args, **kwargs):
157 def wrapper(repo, *args, **kwargs):
158 return orig(repo.unfiltered(), *args, **kwargs)
158 return orig(repo.unfiltered(), *args, **kwargs)
159 return wrapper
159 return wrapper
160
160
161 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
161 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
162 'unbundle'}
162 'unbundle'}
163 legacycaps = moderncaps.union({'changegroupsubset'})
163 legacycaps = moderncaps.union({'changegroupsubset'})
164
164
165 @interfaceutil.implementer(repository.ipeercommandexecutor)
165 @interfaceutil.implementer(repository.ipeercommandexecutor)
166 class localcommandexecutor(object):
166 class localcommandexecutor(object):
167 def __init__(self, peer):
167 def __init__(self, peer):
168 self._peer = peer
168 self._peer = peer
169 self._sent = False
169 self._sent = False
170 self._closed = False
170 self._closed = False
171
171
172 def __enter__(self):
172 def __enter__(self):
173 return self
173 return self
174
174
175 def __exit__(self, exctype, excvalue, exctb):
175 def __exit__(self, exctype, excvalue, exctb):
176 self.close()
176 self.close()
177
177
178 def callcommand(self, command, args):
178 def callcommand(self, command, args):
179 if self._sent:
179 if self._sent:
180 raise error.ProgrammingError('callcommand() cannot be used after '
180 raise error.ProgrammingError('callcommand() cannot be used after '
181 'sendcommands()')
181 'sendcommands()')
182
182
183 if self._closed:
183 if self._closed:
184 raise error.ProgrammingError('callcommand() cannot be used after '
184 raise error.ProgrammingError('callcommand() cannot be used after '
185 'close()')
185 'close()')
186
186
187 # We don't need to support anything fancy. Just call the named
187 # We don't need to support anything fancy. Just call the named
188 # method on the peer and return a resolved future.
188 # method on the peer and return a resolved future.
189 fn = getattr(self._peer, pycompat.sysstr(command))
189 fn = getattr(self._peer, pycompat.sysstr(command))
190
190
191 f = pycompat.futures.Future()
191 f = pycompat.futures.Future()
192
192
193 try:
193 try:
194 result = fn(**pycompat.strkwargs(args))
194 result = fn(**pycompat.strkwargs(args))
195 except Exception:
195 except Exception:
196 pycompat.future_set_exception_info(f, sys.exc_info()[1:])
196 pycompat.future_set_exception_info(f, sys.exc_info()[1:])
197 else:
197 else:
198 f.set_result(result)
198 f.set_result(result)
199
199
200 return f
200 return f
201
201
202 def sendcommands(self):
202 def sendcommands(self):
203 self._sent = True
203 self._sent = True
204
204
205 def close(self):
205 def close(self):
206 self._closed = True
206 self._closed = True
207
207
208 @interfaceutil.implementer(repository.ipeercommands)
208 @interfaceutil.implementer(repository.ipeercommands)
209 class localpeer(repository.peer):
209 class localpeer(repository.peer):
210 '''peer for a local repo; reflects only the most recent API'''
210 '''peer for a local repo; reflects only the most recent API'''
211
211
212 def __init__(self, repo, caps=None):
212 def __init__(self, repo, caps=None):
213 super(localpeer, self).__init__()
213 super(localpeer, self).__init__()
214
214
215 if caps is None:
215 if caps is None:
216 caps = moderncaps.copy()
216 caps = moderncaps.copy()
217 self._repo = repo.filtered('served')
217 self._repo = repo.filtered('served')
218 self.ui = repo.ui
218 self.ui = repo.ui
219 self._caps = repo._restrictcapabilities(caps)
219 self._caps = repo._restrictcapabilities(caps)
220
220
221 # Begin of _basepeer interface.
221 # Begin of _basepeer interface.
222
222
223 def url(self):
223 def url(self):
224 return self._repo.url()
224 return self._repo.url()
225
225
226 def local(self):
226 def local(self):
227 return self._repo
227 return self._repo
228
228
229 def peer(self):
229 def peer(self):
230 return self
230 return self
231
231
232 def canpush(self):
232 def canpush(self):
233 return True
233 return True
234
234
235 def close(self):
235 def close(self):
236 self._repo.close()
236 self._repo.close()
237
237
238 # End of _basepeer interface.
238 # End of _basepeer interface.
239
239
240 # Begin of _basewirecommands interface.
240 # Begin of _basewirecommands interface.
241
241
242 def branchmap(self):
242 def branchmap(self):
243 return self._repo.branchmap()
243 return self._repo.branchmap()
244
244
245 def capabilities(self):
245 def capabilities(self):
246 return self._caps
246 return self._caps
247
247
248 def clonebundles(self):
248 def clonebundles(self):
249 return self._repo.tryread('clonebundles.manifest')
249 return self._repo.tryread('clonebundles.manifest')
250
250
251 def debugwireargs(self, one, two, three=None, four=None, five=None):
251 def debugwireargs(self, one, two, three=None, four=None, five=None):
252 """Used to test argument passing over the wire"""
252 """Used to test argument passing over the wire"""
253 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
253 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
254 pycompat.bytestr(four),
254 pycompat.bytestr(four),
255 pycompat.bytestr(five))
255 pycompat.bytestr(five))
256
256
257 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
257 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
258 **kwargs):
258 **kwargs):
259 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
259 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
260 common=common, bundlecaps=bundlecaps,
260 common=common, bundlecaps=bundlecaps,
261 **kwargs)[1]
261 **kwargs)[1]
262 cb = util.chunkbuffer(chunks)
262 cb = util.chunkbuffer(chunks)
263
263
264 if exchange.bundle2requested(bundlecaps):
264 if exchange.bundle2requested(bundlecaps):
265 # When requesting a bundle2, getbundle returns a stream to make the
265 # When requesting a bundle2, getbundle returns a stream to make the
266 # wire level function happier. We need to build a proper object
266 # wire level function happier. We need to build a proper object
267 # from it in local peer.
267 # from it in local peer.
268 return bundle2.getunbundler(self.ui, cb)
268 return bundle2.getunbundler(self.ui, cb)
269 else:
269 else:
270 return changegroup.getunbundler('01', cb, None)
270 return changegroup.getunbundler('01', cb, None)
271
271
272 def heads(self):
272 def heads(self):
273 return self._repo.heads()
273 return self._repo.heads()
274
274
275 def known(self, nodes):
275 def known(self, nodes):
276 return self._repo.known(nodes)
276 return self._repo.known(nodes)
277
277
278 def listkeys(self, namespace):
278 def listkeys(self, namespace):
279 return self._repo.listkeys(namespace)
279 return self._repo.listkeys(namespace)
280
280
281 def lookup(self, key):
281 def lookup(self, key):
282 return self._repo.lookup(key)
282 return self._repo.lookup(key)
283
283
284 def pushkey(self, namespace, key, old, new):
284 def pushkey(self, namespace, key, old, new):
285 return self._repo.pushkey(namespace, key, old, new)
285 return self._repo.pushkey(namespace, key, old, new)
286
286
    def stream_out(self):
        # Stream clones only make sense over the wire; a local peer has
        # direct filesystem access, so this command is unsupported.
        raise error.Abort(_('cannot perform stream clone against local '
                            'peer'))
290
290
    def unbundle(self, bundle, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        # The outer try maps PushRaced to a wire-level ResponseError; the
        # inner try salvages bundle2 output before re-raising any failure.
        try:
            try:
                bundle = exchange.readbundle(self.ui, bundle, None)
                ret = exchange.unbundle(self._repo, bundle, heads, 'push', url)
                if util.safehasattr(ret, 'getchunks'):
                    # This is a bundle20 object, turn it into an unbundler.
                    # This little dance should be dropped eventually when the
                    # API is finally improved.
                    stream = util.chunkbuffer(ret.getchunks())
                    ret = bundle2.getunbundler(self.ui, stream)
                return ret
            except Exception as exc:
                # If the exception contains output salvaged from a bundle2
                # reply, we need to make sure it is printed before continuing
                # to fail. So we build a bundle2 with such output and consume
                # it directly.
                #
                # This is not very elegant but allows a "simple" solution for
                # issue4594
                output = getattr(exc, '_bundle2salvagedoutput', ())
                if output:
                    bundler = bundle2.bundle20(self._repo.ui)
                    for out in output:
                        bundler.addpart(out)
                    stream = util.chunkbuffer(bundler.getchunks())
                    b = bundle2.getunbundler(self.ui, stream)
                    bundle2.processbundle(self._repo, b)
                raise
        except error.PushRaced as exc:
            raise error.ResponseError(_('push failed:'),
                                      stringutil.forcebytestr(exc))
326
326
327 # End of _basewirecommands interface.
327 # End of _basewirecommands interface.
328
328
329 # Begin of peer interface.
329 # Begin of peer interface.
330
330
    def commandexecutor(self):
        # Local peers run wire protocol commands in-process; wrap this peer
        # in the executor object the peer interface expects.
        return localcommandexecutor(self)
333
333
334 # End of peer interface.
334 # End of peer interface.
335
335
@interfaceutil.implementer(repository.ipeerlegacycommands)
class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        # Advertise only the legacy capability set so callers exercise the
        # pre-bundle2 code paths.
        super(locallegacypeer, self).__init__(repo, caps=legacycaps)

    # Begin of baselegacywirecommands interface.

    def between(self, pairs):
        """Return nodes between pairs of nodes (delegates to the repo)."""
        return self._repo.between(pairs)

    def branches(self, nodes):
        """Return branch information for ``nodes`` (delegates to the repo)."""
        return self._repo.branches(nodes)

    def changegroup(self, nodes, source):
        """Build a version '01' changegroup rooted at ``nodes``."""
        outgoing = discovery.outgoing(self._repo, missingroots=nodes,
                                      missingheads=self._repo.heads())
        return changegroup.makechangegroup(self._repo, outgoing, '01', source)

    def changegroupsubset(self, bases, heads, source):
        """Build a version '01' changegroup between ``bases`` and ``heads``."""
        outgoing = discovery.outgoing(self._repo, missingroots=bases,
                                      missingheads=heads)
        return changegroup.makechangegroup(self._repo, outgoing, '01', source)

    # End of baselegacywirecommands interface.
363
363
# Increment the sub-version when the revlog v2 format changes to lock out old
# clients.
REVLOGV2_REQUIREMENT = 'exp-revlogv2.1'

# A repository with the sparserevlog feature will have delta chains that
# can spread over a larger span. Sparse reading cuts these large spans into
# pieces, so that each piece isn't too big.
# Without the sparserevlog capability, reading from the repository could use
# huge amounts of memory, because the whole span would be read at once,
# including all the intermediate revisions that aren't pertinent for the chain.
# This is why once a repository has enabled sparse-read, it becomes required.
SPARSEREVLOG_REQUIREMENT = 'sparserevlog'

# Functions receiving (ui, features) that extensions can register to impact
# the ability to load repositories with custom requirements. Only
# functions defined in loaded extensions are called.
#
# The function receives a set of requirement strings that the repository
# is capable of opening. Functions will typically add elements to the
# set to reflect that the extension knows how to handle that requirements.
featuresetupfuncs = set()
385
385
def makelocalrepository(baseui, path, intents=None):
    """Create a local repository object.

    Given arguments needed to construct a local repository, this function
    performs various early repository loading functionality (such as
    reading the ``.hg/requires`` and ``.hg/hgrc`` files), validates that
    the repository can be opened, derives a type suitable for representing
    that repository, and returns an instance of it.

    ``baseui`` is the ui instance to derive a repo-local ui from; ``path``
    is the filesystem path to the working directory root; ``intents`` is
    an optional set of intents forwarded to the repository constructor.

    The returned object conforms to the ``repository.completelocalrepository``
    interface.

    The repository type is derived by calling a series of factory functions
    for each aspect/interface of the final repository. These are defined by
    ``REPO_INTERFACES``.

    Each factory function is called to produce a type implementing a specific
    interface. The cumulative list of returned types will be combined into a
    new type and that type will be instantiated to represent the local
    repository.

    The factory functions each receive various state that may be consulted
    as part of deriving a type.

    Extensions should wrap these factory functions to customize repository type
    creation. Note that an extension's wrapped function may be called even if
    that extension is not loaded for the repo being constructed. Extensions
    should check if their ``__name__`` appears in the
    ``extensionmodulenames`` set passed to the factory function and no-op if
    not.

    Raises ``error.RepoError`` if the repository cannot be found or opened.
    """
    ui = baseui.copy()
    # Prevent copying repo configuration.
    ui.copy = baseui.copy

    # Working directory VFS rooted at repository root.
    wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)

    # Main VFS for .hg/ directory.
    hgpath = wdirvfs.join(b'.hg')
    hgvfs = vfsmod.vfs(hgpath, cacheaudited=True)

    # The .hg/ path should exist and should be a directory. All other
    # cases are errors.
    if not hgvfs.isdir():
        try:
            hgvfs.stat()
        except OSError as e:
            # Anything other than "does not exist" (e.g. permission error)
            # is propagated as-is.
            if e.errno != errno.ENOENT:
                raise

        raise error.RepoError(_(b'repository %s not found') % path)

    # .hg/requires file contains a newline-delimited list of
    # features/capabilities the opener (us) must have in order to use
    # the repository. This file was introduced in Mercurial 0.9.2,
    # which means very old repositories may not have one. We assume
    # a missing file translates to no requirements.
    try:
        requirements = set(hgvfs.read(b'requires').splitlines())
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        requirements = set()

    # The .hg/hgrc file may load extensions or contain config options
    # that influence repository construction. Attempt to load it and
    # process any new extensions that it may have pulled in.
    if loadhgrc(ui, wdirvfs, hgvfs, requirements):
        afterhgrcload(ui, wdirvfs, hgvfs, requirements)
        extensions.loadall(ui)
        extensions.populateui(ui)

    # Set of module names of extensions loaded for this repository.
    extensionmodulenames = {m.__name__ for n, m in extensions.extensions(ui)}

    supportedrequirements = gathersupportedrequirements(ui)

    # We first validate the requirements are known.
    ensurerequirementsrecognized(requirements, supportedrequirements)

    # Then we validate that the known set is reasonable to use together.
    ensurerequirementscompatible(ui, requirements)

    # TODO there are unhandled edge cases related to opening repositories with
    # shared storage. If storage is shared, we should also test for requirements
    # compatibility in the pointed-to repo. This entails loading the .hg/hgrc in
    # that repo, as that repo may load extensions needed to open it. This is a
    # bit complicated because we don't want the other hgrc to overwrite settings
    # in this hgrc.
    #
    # This bug is somewhat mitigated by the fact that we copy the .hg/requires
    # file when sharing repos. But if a requirement is added after the share is
    # performed, thereby introducing a new requirement for the opener, we may
    # will not see that and could encounter a run-time error interacting with
    # that shared store since it has an unknown-to-us requirement.

    # At this point, we know we should be capable of opening the repository.
    # Now get on with doing that.

    features = set()

    # The "store" part of the repository holds versioned data. How it is
    # accessed is determined by various requirements. The ``shared`` or
    # ``relshared`` requirements indicate the store lives in the path contained
    # in the ``.hg/sharedpath`` file. This is an absolute path for
    # ``shared`` and relative to ``.hg/`` for ``relshared``.
    if b'shared' in requirements or b'relshared' in requirements:
        sharedpath = hgvfs.read(b'sharedpath').rstrip(b'\n')
        if b'relshared' in requirements:
            sharedpath = hgvfs.join(sharedpath)

        sharedvfs = vfsmod.vfs(sharedpath, realpath=True)

        if not sharedvfs.exists():
            raise error.RepoError(_(b'.hg/sharedpath points to nonexistent '
                                    b'directory %s') % sharedvfs.base)

        features.add(repository.REPO_FEATURE_SHARED_STORAGE)

        storebasepath = sharedvfs.base
        cachepath = sharedvfs.join(b'cache')
    else:
        storebasepath = hgvfs.base
        cachepath = hgvfs.join(b'cache')
    # The working-copy cache always lives next to this repo's .hg/, even
    # when the store is shared.
    wcachepath = hgvfs.join(b'wcache')


    # The store has changed over time and the exact layout is dictated by
    # requirements. The store interface abstracts differences across all
    # of them.
    store = makestore(requirements, storebasepath,
                      lambda base: vfsmod.vfs(base, cacheaudited=True))
    hgvfs.createmode = store.createmode

    storevfs = store.vfs
    storevfs.options = resolvestorevfsoptions(ui, requirements, features)

    # The cache vfs is used to manage cache files.
    cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
    cachevfs.createmode = store.createmode
    # The cache vfs is used to manage cache files related to the working copy
    wcachevfs = vfsmod.vfs(wcachepath, cacheaudited=True)
    wcachevfs.createmode = store.createmode

    # Now resolve the type for the repository object. We do this by repeatedly
    # calling a factory function to produces types for specific aspects of the
    # repo's operation. The aggregate returned types are used as base classes
    # for a dynamically-derived type, which will represent our new repository.

    bases = []
    extrastate = {}

    for iface, fn in REPO_INTERFACES:
        # We pass all potentially useful state to give extensions tons of
        # flexibility.
        typ = fn()(ui=ui,
                   intents=intents,
                   requirements=requirements,
                   features=features,
                   wdirvfs=wdirvfs,
                   hgvfs=hgvfs,
                   store=store,
                   storevfs=storevfs,
                   storeoptions=storevfs.options,
                   cachevfs=cachevfs,
                   wcachevfs=wcachevfs,
                   extensionmodulenames=extensionmodulenames,
                   extrastate=extrastate,
                   baseclasses=bases)

        if not isinstance(typ, type):
            raise error.ProgrammingError('unable to construct type for %s' %
                                         iface)

        bases.append(typ)

    # type() allows you to use characters in type names that wouldn't be
    # recognized as Python symbols in source code. We abuse that to add
    # rich information about our constructed repo.
    name = pycompat.sysstr(b'derivedrepo:%s<%s>' % (
        wdirvfs.base,
        b','.join(sorted(requirements))))

    cls = type(name, tuple(bases), {})

    return cls(
        baseui=baseui,
        ui=ui,
        origroot=path,
        wdirvfs=wdirvfs,
        hgvfs=hgvfs,
        requirements=requirements,
        supportedrequirements=supportedrequirements,
        sharedpath=storebasepath,
        store=store,
        cachevfs=cachevfs,
        wcachevfs=wcachevfs,
        features=features,
        intents=intents)
586
586
def loadhgrc(ui, wdirvfs, hgvfs, requirements):
    """Load hgrc files/content into a ui instance.

    This is called during repository opening to load any additional
    config files or settings relevant to the current repository.

    Returns a bool indicating whether any additional configs were loaded.

    Extensions should monkeypatch this function to modify how per-repo
    configs are loaded. For example, an extension may wish to pull in
    configs from alternate files or sources.
    """
    hgrcpath = hgvfs.join(b'hgrc')
    try:
        ui.readconfig(hgrcpath, root=wdirvfs.base)
    except IOError:
        # A missing (or unreadable) .hg/hgrc simply means there is no
        # per-repo config to apply.
        return False
    return True
604
604
def afterhgrcload(ui, wdirvfs, hgvfs, requirements):
    """Perform additional actions after .hg/hgrc is loaded.

    This function is called during repository loading immediately after
    the .hg/hgrc file is loaded and before per-repo extensions are loaded.

    The function can be used to validate configs, automatically add
    options (including extensions) based on requirements, etc.
    """
    # Extensions to enable automatically when the keyed requirement is
    # present in the repository.
    autoextensions = {
        b'largefiles': [b'largefiles'],
        b'lfs': [b'lfs'],
    }

    for requirement in sorted(autoextensions):
        if requirement not in requirements:
            continue

        for name in autoextensions[requirement]:
            # An explicit user setting for the extension always wins.
            if ui.hasconfig(b'extensions', name):
                continue
            ui.setconfig(b'extensions', name, b'', source='autoload')
629
629
def gathersupportedrequirements(ui):
    """Determine the complete set of recognized requirements."""
    # Seed with the requirements this module implements directly.
    supported = set(localrepository._basesupported)

    # Execute ``featuresetupfuncs`` entries if they belong to an extension
    # relevant to this ui instance.
    modules = {m.__name__ for n, m in extensions.extensions(ui)}
    for fn in featuresetupfuncs:
        if fn.__module__ in modules:
            fn(ui, supported)

    # Derive requirements from the registered compression engines that are
    # usable for revlog storage.
    for name in util.compengines:
        engine = util.compengines[name]
        if engine.available() and engine.revlogheader():
            supported.add(b'exp-compression-%s' % name)
            if engine.name() == 'zstd':
                supported.add(b'revlog-compression-zstd')

    return supported
652
652
def ensurerequirementsrecognized(requirements, supported):
    """Validate that a set of local requirements is recognized.

    Receives a set of requirements. Raises an ``error.RepoError`` if there
    exists any requirement in that set that currently loaded code doesn't
    recognize.

    Returns ``None`` when every requirement is recognized.
    """
    missing = {r for r in requirements if r not in supported}

    for requirement in missing:
        # An empty or non-alphanumeric-leading entry means the requires
        # file itself is mangled, not merely that a feature is unknown.
        if not requirement or not requirement[0:1].isalnum():
            raise error.RequirementError(_(b'.hg/requires file is corrupt'))

    if missing:
        raise error.RequirementError(
            _(b'repository requires features unknown to this Mercurial: %s') %
            b' '.join(sorted(missing)),
            hint=_(b'see https://mercurial-scm.org/wiki/MissingRequirement '
                   b'for more information'))
679
679
def ensurerequirementscompatible(ui, requirements):
    """Validates that a set of recognized requirements is mutually compatible.

    Some requirements may not be compatible with others or require
    config options that aren't enabled. This function is called during
    repository opening to ensure that the set of requirements needed
    to open a repository is sane and compatible with config options.

    Extensions can monkeypatch this function to perform additional
    checking.

    ``error.RepoError`` should be raised on failure.
    """
    if b'exp-sparse' not in requirements:
        return
    if sparse.enabled:
        return
    # Repo needs the sparse feature but the extension providing it is off.
    raise error.RepoError(_(b'repository is using sparse feature but '
                            b'sparse is not enabled; enable the '
                            b'"sparse" extensions to access'))
697
697
def makestore(requirements, path, vfstype):
    """Construct a storage object for a repository."""
    if b'store' not in requirements:
        # Ancient layout: versioned files live directly under .hg/,
        # with no filename encoding.
        return storemod.basicstore(path, vfstype)

    if b'fncache' in requirements:
        dotencode = b'dotencode' in requirements
        return storemod.fncachestore(path, vfstype, dotencode)

    return storemod.encodedstore(path, vfstype)
708
708
def resolvestorevfsoptions(ui, requirements, features):
    """Resolve the options to pass to the store vfs opener.

    The returned dict is used to influence behavior of the storage layer.
    """
    opts = {}

    if b'treemanifest' in requirements:
        opts[b'treemanifest'] = True

    # experimental config: format.manifestcachesize
    cachesize = ui.configint(b'format', b'manifestcachesize')
    if cachesize is not None:
        opts[b'manifestcachesize'] = cachesize

    # In the absence of another requirement superseding a revlog-related
    # requirement, we have to assume the repo is using revlog version 0.
    # This revlog format is super old and we don't bother trying to parse
    # opener options for it because those options wouldn't do anything
    # meaningful on such old repos.
    if b'revlogv1' in requirements or REVLOGV2_REQUIREMENT in requirements:
        opts.update(resolverevlogstorevfsoptions(ui, requirements, features))

    return opts
733
733
def resolverevlogstorevfsoptions(ui, requirements, features):
    """Resolve opener options specific to revlogs.

    ``requirements`` drives format-level options (revlog version,
    generaldelta, sparse-revlog, compression engine); config options under
    ``format``, ``storage`` and ``experimental`` supply tuning knobs.

    Raises ``error.Abort`` for out-of-range zlib/zstd compression levels.
    """

    options = {}
    # Registry of revlog flag processors; starts out empty here.
    options[b'flagprocessors'] = {}

    if b'revlogv1' in requirements:
        options[b'revlogv1'] = True
    if REVLOGV2_REQUIREMENT in requirements:
        options[b'revlogv2'] = True

    if b'generaldelta' in requirements:
        options[b'generaldelta'] = True

    # experimental config: format.chunkcachesize
    chunkcachesize = ui.configint(b'format', b'chunkcachesize')
    if chunkcachesize is not None:
        options[b'chunkcachesize'] = chunkcachesize

    deltabothparents = ui.configbool(b'storage',
                                     b'revlog.optimize-delta-parent-choice')
    options[b'deltabothparents'] = deltabothparents

    lazydelta = ui.configbool(b'storage', b'revlog.reuse-external-delta')
    lazydeltabase = False
    if lazydelta:
        lazydeltabase = ui.configbool(b'storage',
                                      b'revlog.reuse-external-delta-parent')
    # NOTE(review): configbool apparently may yield None when the config
    # default is None; fall back to the generaldelta-derived default then.
    if lazydeltabase is None:
        lazydeltabase = not scmutil.gddeltaconfig(ui)
    options[b'lazydelta'] = lazydelta
    options[b'lazydeltabase'] = lazydeltabase

    chainspan = ui.configbytes(b'experimental', b'maxdeltachainspan')
    if 0 <= chainspan:
        options[b'maxdeltachainspan'] = chainspan

    mmapindexthreshold = ui.configbytes(b'experimental',
                                        b'mmapindexthreshold')
    if mmapindexthreshold is not None:
        options[b'mmapindexthreshold'] = mmapindexthreshold

    withsparseread = ui.configbool(b'experimental', b'sparse-read')
    srdensitythres = float(ui.config(b'experimental',
                                     b'sparse-read.density-threshold'))
    srmingapsize = ui.configbytes(b'experimental',
                                  b'sparse-read.min-gap-size')
    options[b'with-sparse-read'] = withsparseread
    options[b'sparse-read-density-threshold'] = srdensitythres
    options[b'sparse-read-min-gap-size'] = srmingapsize

    sparserevlog = SPARSEREVLOG_REQUIREMENT in requirements
    options[b'sparse-revlog'] = sparserevlog
    if sparserevlog:
        # sparse-revlog implies generaldelta even when the requirement
        # isn't listed explicitly.
        options[b'generaldelta'] = True

    maxchainlen = None
    if sparserevlog:
        maxchainlen = revlogconst.SPARSE_REVLOG_MAX_CHAIN_LENGTH
    # experimental config: format.maxchainlen
    maxchainlen = ui.configint(b'format', b'maxchainlen', maxchainlen)
    if maxchainlen is not None:
        options[b'maxchainlen'] = maxchainlen

    for r in requirements:
        # we allow multiple compression engine requirement to co-exist because
        # strictly speaking, revlog seems to support mixed compression style.
        #
        # The compression used for new entries will be "the last one"
        prefix = r.startswith
        if prefix('revlog-compression-') or prefix('exp-compression-'):
            options[b'compengine'] = r.split('-', 2)[2]

    # Compression levels are range-checked here so a bad config aborts at
    # repo-open time rather than during a write.
    options[b'zlib.level'] = ui.configint(b'storage', b'revlog.zlib.level')
    if options[b'zlib.level'] is not None:
        if not (0 <= options[b'zlib.level'] <= 9):
            msg = _('invalid value for `storage.revlog.zlib.level` config: %d')
            raise error.Abort(msg % options[b'zlib.level'])
    options[b'zstd.level'] = ui.configint(b'storage', b'revlog.zstd.level')
    if options[b'zstd.level'] is not None:
        if not (0 <= options[b'zstd.level'] <= 22):
            msg = _('invalid value for `storage.revlog.zstd.level` config: %d')
            raise error.Abort(msg % options[b'zstd.level'])

    if repository.NARROW_REQUIREMENT in requirements:
        options[b'enableellipsis'] = True

    return options
822
822
def makemain(**kwargs):
    """Produce a type conforming to ``ilocalrepositorymain``.

    Keyword arguments are accepted (and ignored) so the signature stays
    uniform with the other repository type factories.
    """
    maintype = localrepository
    return maintype
826
826
@interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
class revlogfilestorage(object):
    """File storage when using revlogs."""

    def file(self, path):
        """Obtain a filelog for ``path``.

        A leading '/' on ``path`` is stripped, since store paths do not
        carry one.
        """
        normed = path[1:] if path[0] == b'/' else path
        return filelog.filelog(self.svfs, normed)
836
836
@interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
class revlognarrowfilestorage(object):
    """File storage when using revlogs and narrow files."""

    def file(self, path):
        """Obtain a narrow-aware filelog for ``path``.

        A leading '/' on ``path`` is stripped, since store paths do not
        carry one.
        """
        normed = path[1:] if path[0] == b'/' else path
        return filelog.narrowfilelog(self.svfs, normed, self._storenarrowmatch)
846
846
def makefilestorage(requirements, features, **kwargs):
    """Produce a type conforming to ``ilocalrepositoryfilestorage``.

    Side effect: records revlog file storage and stream clone support in
    the ``features`` set.
    """
    features.update((repository.REPO_FEATURE_REVLOG_FILE_STORAGE,
                     repository.REPO_FEATURE_STREAM_CLONE))

    # Narrow repositories need the narrow-aware filelog wrapper.
    if repository.NARROW_REQUIREMENT in requirements:
        return revlognarrowfilestorage
    return revlogfilestorage
856
856
# List of repository interfaces and factory functions for them. Each
# will be called in order during ``makelocalrepository()`` to iteratively
# derive the final type for a local repository instance. We capture the
# function as a lambda so we don't hold a reference and the module-level
# functions can be wrapped (e.g. by extensions).
REPO_INTERFACES = [
    (repository.ilocalrepositorymain, lambda: makemain),
    (repository.ilocalrepositoryfilestorage, lambda: makefilestorage),
]
866
866
867 @interfaceutil.implementer(repository.ilocalrepositorymain)
867 @interfaceutil.implementer(repository.ilocalrepositorymain)
868 class localrepository(object):
868 class localrepository(object):
869 """Main class for representing local repositories.
869 """Main class for representing local repositories.
870
870
871 All local repositories are instances of this class.
871 All local repositories are instances of this class.
872
872
873 Constructed on its own, instances of this class are not usable as
873 Constructed on its own, instances of this class are not usable as
874 repository objects. To obtain a usable repository object, call
874 repository objects. To obtain a usable repository object, call
875 ``hg.repository()``, ``localrepo.instance()``, or
875 ``hg.repository()``, ``localrepo.instance()``, or
876 ``localrepo.makelocalrepository()``. The latter is the lowest-level.
876 ``localrepo.makelocalrepository()``. The latter is the lowest-level.
877 ``instance()`` adds support for creating new repositories.
877 ``instance()`` adds support for creating new repositories.
878 ``hg.repository()`` adds more extension integration, including calling
878 ``hg.repository()`` adds more extension integration, including calling
879 ``reposetup()``. Generally speaking, ``hg.repository()`` should be
879 ``reposetup()``. Generally speaking, ``hg.repository()`` should be
880 used.
880 used.
881 """
881 """
882
882
    # obsolete experimental requirements:
    # - manifestv2: An experimental new manifest format that allowed
    #   for stem compression of long paths. Experiment ended up not
    #   being successful (repository sizes went up due to worse delta
    #   chains), and the code was deleted in 4.6.

    # Requirements that affect the on-disk storage format (as opposed to
    # just repository behavior).
    supportedformats = {
        'revlogv1',
        'generaldelta',
        'treemanifest',
        REVLOGV2_REQUIREMENT,
        SPARSEREVLOG_REQUIREMENT,
    }
    # Full set of requirements this class knows how to open.
    _basesupported = supportedformats | {
        'store',
        'fncache',
        'shared',
        'relshared',
        'dotencode',
        'exp-sparse',
        'internal-phase'
    }

    # list of prefix for file which can be written without 'wlock'
    # Extensions should extend this list when needed
    _wlockfreeprefix = {
        # We might consider requiring 'wlock' for the next
        # two, but pretty much all the existing code assume
        # wlock is not needed so we keep them excluded for
        # now.
        'hgrc',
        'requires',
        # XXX cache is a complicated business someone
        # should investigate this in depth at some point
        'cache/',
        # XXX shouldn't be dirstate covered by the wlock?
        'dirstate',
        # XXX bisect was still a bit too messy at the time
        # this changeset was introduced. Someone should fix
        # the remaining bit and drop this line
        'bisect.state',
    }
924
924
    def __init__(self, baseui, ui, origroot, wdirvfs, hgvfs, requirements,
                 supportedrequirements, sharedpath, store, cachevfs, wcachevfs,
                 features, intents=None):
        """Create a new local repository instance.

        Most callers should use ``hg.repository()``, ``localrepo.instance()``,
        or ``localrepo.makelocalrepository()`` for obtaining a new repository
        object.

        Arguments:

        baseui
           ``ui.ui`` instance that ``ui`` argument was based off of.

        ui
           ``ui.ui`` instance for use by the repository.

        origroot
           ``bytes`` path to working directory root of this repository.

        wdirvfs
           ``vfs.vfs`` rooted at the working directory.

        hgvfs
           ``vfs.vfs`` rooted at .hg/

        requirements
           ``set`` of bytestrings representing repository opening requirements.

        supportedrequirements
           ``set`` of bytestrings representing repository requirements that we
           know how to open. May be a superset of ``requirements``.

        sharedpath
           ``bytes`` Defining path to storage base directory. Points to a
           ``.hg/`` directory somewhere.

        store
           ``store.basicstore`` (or derived) instance providing access to
           versioned storage.

        cachevfs
           ``vfs.vfs`` used for cache files.

        wcachevfs
           ``vfs.vfs`` used for cache files related to the working copy.

        features
           ``set`` of bytestrings defining features/capabilities of this
           instance.

        intents
           ``set`` of system strings indicating what this repo will be used
           for.
        """
        self.baseui = baseui
        self.ui = ui
        self.origroot = origroot
        # vfs rooted at working directory.
        self.wvfs = wdirvfs
        self.root = wdirvfs.base
        # vfs rooted at .hg/. Used to access most non-store paths.
        self.vfs = hgvfs
        self.path = hgvfs.base
        self.requirements = requirements
        self.supported = supportedrequirements
        self.sharedpath = sharedpath
        self.store = store
        self.cachevfs = cachevfs
        self.wcachevfs = wcachevfs
        self.features = features

        # Set by repoview on filtered copies; None means unfiltered.
        self.filtername = None

        # Install the vfs write ward when lock-checking dev warnings are on.
        if (self.ui.configbool('devel', 'all-warnings') or
            self.ui.configbool('devel', 'check-locks')):
            self.vfs.audit = self._getvfsward(self.vfs.audit)
        # A list of callbacks to shape the phase if no data were found.
        # Callbacks are in the form: func(repo, roots) --> processed root.
        # This list is to be filled by extensions during repo setup.
        self._phasedefaults = []

        color.setup(self.ui)

        self.spath = self.store.path
        self.svfs = self.store.vfs
        self.sjoin = self.store.join
        if (self.ui.configbool('devel', 'all-warnings') or
            self.ui.configbool('devel', 'check-locks')):
            if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
                self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
            else: # standard vfs
                self.svfs.audit = self._getsvfsward(self.svfs.audit)

        self._dirstatevalidatewarned = False

        self._branchcaches = branchmap.BranchMapCache()
        # Lazily created; _writecaches() only writes it if instantiated.
        self._revbranchcache = None
        self._filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}

        # hold sets of revision to be filtered
        # should be cleared when something might have changed the filter value:
        # - new changesets,
        # - phase change,
        # - new obsolescence marker,
        # - working directory parent change,
        # - bookmark changes
        self.filteredrevcache = {}

        # post-dirstate-status hooks
        self._postdsstatus = []

        # generic mapping between names and nodes
        self.names = namespaces.namespaces()

        # Key to signature value.
        self._sparsesignaturecache = {}
        # Signature to cached matcher instance.
        self._sparsematchercache = {}
1052
1052
1053 def _getvfsward(self, origfunc):
1053 def _getvfsward(self, origfunc):
1054 """build a ward for self.vfs"""
1054 """build a ward for self.vfs"""
1055 rref = weakref.ref(self)
1055 rref = weakref.ref(self)
1056 def checkvfs(path, mode=None):
1056 def checkvfs(path, mode=None):
1057 ret = origfunc(path, mode=mode)
1057 ret = origfunc(path, mode=mode)
1058 repo = rref()
1058 repo = rref()
1059 if (repo is None
1059 if (repo is None
1060 or not util.safehasattr(repo, '_wlockref')
1060 or not util.safehasattr(repo, '_wlockref')
1061 or not util.safehasattr(repo, '_lockref')):
1061 or not util.safehasattr(repo, '_lockref')):
1062 return
1062 return
1063 if mode in (None, 'r', 'rb'):
1063 if mode in (None, 'r', 'rb'):
1064 return
1064 return
1065 if path.startswith(repo.path):
1065 if path.startswith(repo.path):
1066 # truncate name relative to the repository (.hg)
1066 # truncate name relative to the repository (.hg)
1067 path = path[len(repo.path) + 1:]
1067 path = path[len(repo.path) + 1:]
1068 if path.startswith('cache/'):
1068 if path.startswith('cache/'):
1069 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
1069 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
1070 repo.ui.develwarn(msg % path, stacklevel=3, config="cache-vfs")
1070 repo.ui.develwarn(msg % path, stacklevel=3, config="cache-vfs")
1071 if path.startswith('journal.') or path.startswith('undo.'):
1071 if path.startswith('journal.') or path.startswith('undo.'):
1072 # journal is covered by 'lock'
1072 # journal is covered by 'lock'
1073 if repo._currentlock(repo._lockref) is None:
1073 if repo._currentlock(repo._lockref) is None:
1074 repo.ui.develwarn('write with no lock: "%s"' % path,
1074 repo.ui.develwarn('write with no lock: "%s"' % path,
1075 stacklevel=3, config='check-locks')
1075 stacklevel=3, config='check-locks')
1076 elif repo._currentlock(repo._wlockref) is None:
1076 elif repo._currentlock(repo._wlockref) is None:
1077 # rest of vfs files are covered by 'wlock'
1077 # rest of vfs files are covered by 'wlock'
1078 #
1078 #
1079 # exclude special files
1079 # exclude special files
1080 for prefix in self._wlockfreeprefix:
1080 for prefix in self._wlockfreeprefix:
1081 if path.startswith(prefix):
1081 if path.startswith(prefix):
1082 return
1082 return
1083 repo.ui.develwarn('write with no wlock: "%s"' % path,
1083 repo.ui.develwarn('write with no wlock: "%s"' % path,
1084 stacklevel=3, config='check-locks')
1084 stacklevel=3, config='check-locks')
1085 return ret
1085 return ret
1086 return checkvfs
1086 return checkvfs
1087
1087
1088 def _getsvfsward(self, origfunc):
1088 def _getsvfsward(self, origfunc):
1089 """build a ward for self.svfs"""
1089 """build a ward for self.svfs"""
1090 rref = weakref.ref(self)
1090 rref = weakref.ref(self)
1091 def checksvfs(path, mode=None):
1091 def checksvfs(path, mode=None):
1092 ret = origfunc(path, mode=mode)
1092 ret = origfunc(path, mode=mode)
1093 repo = rref()
1093 repo = rref()
1094 if repo is None or not util.safehasattr(repo, '_lockref'):
1094 if repo is None or not util.safehasattr(repo, '_lockref'):
1095 return
1095 return
1096 if mode in (None, 'r', 'rb'):
1096 if mode in (None, 'r', 'rb'):
1097 return
1097 return
1098 if path.startswith(repo.sharedpath):
1098 if path.startswith(repo.sharedpath):
1099 # truncate name relative to the repository (.hg)
1099 # truncate name relative to the repository (.hg)
1100 path = path[len(repo.sharedpath) + 1:]
1100 path = path[len(repo.sharedpath) + 1:]
1101 if repo._currentlock(repo._lockref) is None:
1101 if repo._currentlock(repo._lockref) is None:
1102 repo.ui.develwarn('write with no lock: "%s"' % path,
1102 repo.ui.develwarn('write with no lock: "%s"' % path,
1103 stacklevel=4)
1103 stacklevel=4)
1104 return ret
1104 return ret
1105 return checksvfs
1105 return checksvfs
1106
1106
    def close(self):
        """Close the repository, writing out pending caches."""
        self._writecaches()
1109
1109
1110 def _writecaches(self):
1110 def _writecaches(self):
1111 if self._revbranchcache:
1111 if self._revbranchcache:
1112 self._revbranchcache.write()
1112 self._revbranchcache.write()
1113
1113
1114 def _restrictcapabilities(self, caps):
1114 def _restrictcapabilities(self, caps):
1115 if self.ui.configbool('experimental', 'bundle2-advertise'):
1115 if self.ui.configbool('experimental', 'bundle2-advertise'):
1116 caps = set(caps)
1116 caps = set(caps)
1117 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self,
1117 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self,
1118 role='client'))
1118 role='client'))
1119 caps.add('bundle2=' + urlreq.quote(capsblob))
1119 caps.add('bundle2=' + urlreq.quote(capsblob))
1120 return caps
1120 return caps
1121
1121
    def _writerequirements(self):
        """Write ``self.requirements`` out through the .hg/ vfs."""
        scmutil.writerequires(self.vfs, self.requirements)
1124
1124
1125 # Don't cache auditor/nofsauditor, or you'll end up with reference cycle:
1125 # Don't cache auditor/nofsauditor, or you'll end up with reference cycle:
1126 # self -> auditor -> self._checknested -> self
1126 # self -> auditor -> self._checknested -> self
1127
1127
    @property
    def auditor(self):
        # This is only used by context.workingctx.match in order to
        # detect files in subrepos.
        # Deliberately a plain (uncached) property: caching would create a
        # self -> auditor -> self._checknested -> self reference cycle.
        return pathutil.pathauditor(self.root, callback=self._checknested)
1133
1133
    @property
    def nofsauditor(self):
        # This is only used by context.basectx.match in order to detect
        # files in subrepos.
        # Deliberately a plain (uncached) property to avoid a reference
        # cycle through self._checknested.
        # realfs=False: presumably skips on-disk checks — confirm in pathutil.
        return pathutil.pathauditor(self.root, callback=self._checknested,
                                    realfs=False, cached=True)
1140
1140
    def _checknested(self, path):
        """Determine if path is a legal nested repository.

        ``path`` is an absolute filesystem path; returns True only when it
        lies inside ``self.root`` and resolves (possibly recursively through
        subrepos) to a registered subrepository of the working copy.
        """
        if not path.startswith(self.root):
            return False
        # Strip "<root>/" to get a repo-relative path, normalized to '/'
        # separators so it can be compared against substate keys.
        subpath = path[len(self.root) + 1:]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            prefix = '/'.join(parts)
            if prefix in ctx.substate:
                if prefix == normsubpath:
                    # path is itself a registered subrepo
                    return True
                else:
                    # path is nested inside a subrepo: delegate the check
                    # to that subrepo with the remaining path suffix
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                # try the next shorter prefix (walk toward the root)
                parts.pop()
        return False
1178
1178
1179 def peer(self):
1179 def peer(self):
1180 return localpeer(self) # not cached to avoid reference cycle
1180 return localpeer(self) # not cached to avoid reference cycle
1181
1181
1182 def unfiltered(self):
1182 def unfiltered(self):
1183 """Return unfiltered version of the repository
1183 """Return unfiltered version of the repository
1184
1184
1185 Intended to be overwritten by filtered repo."""
1185 Intended to be overwritten by filtered repo."""
1186 return self
1186 return self
1187
1187
1188 def filtered(self, name, visibilityexceptions=None):
1188 def filtered(self, name, visibilityexceptions=None):
1189 """Return a filtered version of a repository
1189 """Return a filtered version of a repository
1190
1190
1191 The `name` parameter is the identifier of the requested view. This
1191 The `name` parameter is the identifier of the requested view. This
1192 will return a repoview object set "exactly" to the specified view.
1192 will return a repoview object set "exactly" to the specified view.
1193
1193
1194 This function does not apply recursive filtering to a repository. For
1194 This function does not apply recursive filtering to a repository. For
1195 example calling `repo.filtered("served")` will return a repoview using
1195 example calling `repo.filtered("served")` will return a repoview using
1196 the "served" view, regardless of the initial view used by `repo`.
1196 the "served" view, regardless of the initial view used by `repo`.
1197
1197
1198 In other word, there is always only one level of `repoview` "filtering".
1198 In other word, there is always only one level of `repoview` "filtering".
1199 """
1199 """
1200 cls = repoview.newtype(self.unfiltered().__class__)
1200 cls = repoview.newtype(self.unfiltered().__class__)
1201 return cls(self, name, visibilityexceptions)
1201 return cls(self, name, visibilityexceptions)
1202
1202
1203 @repofilecache('bookmarks', 'bookmarks.current')
1203 @repofilecache('bookmarks', 'bookmarks.current')
1204 def _bookmarks(self):
1204 def _bookmarks(self):
1205 return bookmarks.bmstore(self)
1205 return bookmarks.bmstore(self)
1206
1206
1207 @property
1207 @property
1208 def _activebookmark(self):
1208 def _activebookmark(self):
1209 return self._bookmarks.active
1209 return self._bookmarks.active
1210
1210
    # _phasesets depend on changelog. what we need is to call
    # _phasecache.invalidate() if '00changelog.i' was changed, but it
    # can't be easily expressed in filecache mechanism.
    @storecache('phaseroots', '00changelog.i')
    def _phasecache(self):
        # Phase cache for this repository; invalidated when either the
        # phaseroots file or the changelog changes (see note above).
        return phases.phasecache(self, self._phasedefaults)
1217
1217
1218 @storecache('obsstore')
1218 @storecache('obsstore')
1219 def obsstore(self):
1219 def obsstore(self):
1220 return obsolete.makestore(self.ui, self)
1220 return obsolete.makestore(self.ui, self)
1221
1221
1222 @storecache('00changelog.i')
1222 @storecache('00changelog.i')
1223 def changelog(self):
1223 def changelog(self):
1224 return changelog.changelog(self.svfs,
1224 return changelog.changelog(self.svfs,
1225 trypending=txnutil.mayhavepending(self.root))
1225 trypending=txnutil.mayhavepending(self.root))
1226
1226
1227 @storecache('00manifest.i')
1227 @storecache('00manifest.i')
1228 def manifestlog(self):
1228 def manifestlog(self):
1229 rootstore = manifest.manifestrevlog(self.svfs)
1229 rootstore = manifest.manifestrevlog(self.svfs)
1230 return manifest.manifestlog(self.svfs, self, rootstore,
1230 return manifest.manifestlog(self.svfs, self, rootstore,
1231 self._storenarrowmatch)
1231 self._storenarrowmatch)
1232
1232
1233 @repofilecache('dirstate')
1233 @repofilecache('dirstate')
1234 def dirstate(self):
1234 def dirstate(self):
1235 return self._makedirstate()
1235 return self._makedirstate()
1236
1236
1237 def _makedirstate(self):
1237 def _makedirstate(self):
1238 """Extension point for wrapping the dirstate per-repo."""
1238 """Extension point for wrapping the dirstate per-repo."""
1239 sparsematchfn = lambda: sparse.matcher(self)
1239 sparsematchfn = lambda: sparse.matcher(self)
1240
1240
1241 return dirstate.dirstate(self.vfs, self.ui, self.root,
1241 return dirstate.dirstate(self.vfs, self.ui, self.root,
1242 self._dirstatevalidate, sparsematchfn)
1242 self._dirstatevalidate, sparsematchfn)
1243
1243
    def _dirstatevalidate(self, node):
        """Validate a dirstate parent node against the changelog.

        Returns ``node`` unchanged when it is a known changeset; otherwise
        warns once per repo instance and falls back to ``nullid`` so a
        damaged dirstate does not crash the command.
        """
        try:
            self.changelog.rev(node)
            return node
        except error.LookupError:
            # Warn only the first time to avoid repeating the message for
            # every dirstate access in the same process.
            if not self._dirstatevalidatewarned:
                self._dirstatevalidatewarned = True
                self.ui.warn(_("warning: ignoring unknown"
                               " working parent %s!\n") % short(node))
            return nullid
1254
1254
1255 @storecache(narrowspec.FILENAME)
1255 @storecache(narrowspec.FILENAME)
1256 def narrowpats(self):
1256 def narrowpats(self):
1257 """matcher patterns for this repository's narrowspec
1257 """matcher patterns for this repository's narrowspec
1258
1258
1259 A tuple of (includes, excludes).
1259 A tuple of (includes, excludes).
1260 """
1260 """
1261 return narrowspec.load(self)
1261 return narrowspec.load(self)
1262
1262
1263 @storecache(narrowspec.FILENAME)
1263 @storecache(narrowspec.FILENAME)
1264 def _storenarrowmatch(self):
1264 def _storenarrowmatch(self):
1265 if repository.NARROW_REQUIREMENT not in self.requirements:
1265 if repository.NARROW_REQUIREMENT not in self.requirements:
1266 return matchmod.always()
1266 return matchmod.always()
1267 include, exclude = self.narrowpats
1267 include, exclude = self.narrowpats
1268 return narrowspec.match(self.root, include=include, exclude=exclude)
1268 return narrowspec.match(self.root, include=include, exclude=exclude)
1269
1269
1270 @storecache(narrowspec.FILENAME)
1270 @storecache(narrowspec.FILENAME)
1271 def _narrowmatch(self):
1271 def _narrowmatch(self):
1272 if repository.NARROW_REQUIREMENT not in self.requirements:
1272 if repository.NARROW_REQUIREMENT not in self.requirements:
1273 return matchmod.always()
1273 return matchmod.always()
1274 narrowspec.checkworkingcopynarrowspec(self)
1274 narrowspec.checkworkingcopynarrowspec(self)
1275 include, exclude = self.narrowpats
1275 include, exclude = self.narrowpats
1276 return narrowspec.match(self.root, include=include, exclude=exclude)
1276 return narrowspec.match(self.root, include=include, exclude=exclude)
1277
1277
    def narrowmatch(self, match=None, includeexact=False):
        """matcher corresponding the the repo's narrowspec

        If `match` is given, then that will be intersected with the narrow
        matcher.

        If `includeexact` is True, then any exact matches from `match` will
        be included even if they're outside the narrowspec.
        """
        if match:
            if includeexact and not self._narrowmatch.always():
                # do not exclude explicitly-specified paths so that they can
                # be warned later on
                em = matchmod.exact(match.files())
                # widen the narrow matcher with the exact files before
                # intersecting, so explicit paths survive the narrowing
                nm = matchmod.unionmatcher([self._narrowmatch, em])
                return matchmod.intersectmatchers(match, nm)
            return matchmod.intersectmatchers(match, self._narrowmatch)
        # no caller matcher: the narrowspec matcher alone applies
        return self._narrowmatch
1296
1296
1297 def setnarrowpats(self, newincludes, newexcludes):
1297 def setnarrowpats(self, newincludes, newexcludes):
1298 narrowspec.save(self, newincludes, newexcludes)
1298 narrowspec.save(self, newincludes, newexcludes)
1299 self.invalidate(clearfilecache=True)
1299 self.invalidate(clearfilecache=True)
1300
1300
    def __getitem__(self, changeid):
        """Return the changectx (or workingctx) for ``changeid``.

        Accepts None (working directory), an existing context, a slice of
        revision numbers, an integer revision, the symbolic names 'null',
        'tip' and '.', a 20-byte binary node, or a 40-char hex node.
        Raises RepoLookupError (or FilteredRepoLookupError) on unknown or
        filtered revisions, and ProgrammingError for unsupported types.
        """
        if changeid is None:
            return context.workingctx(self)
        if isinstance(changeid, context.basectx):
            return changeid
        if isinstance(changeid, slice):
            # wdirrev isn't contiguous so the slice shouldn't include it
            return [self[i]
                    for i in pycompat.xrange(*changeid.indices(len(self)))
                    if i not in self.changelog.filteredrevs]
        try:
            if isinstance(changeid, int):
                node = self.changelog.node(changeid)
                rev = changeid
            elif changeid == 'null':
                node = nullid
                rev = nullrev
            elif changeid == 'tip':
                node = self.changelog.tip()
                rev = self.changelog.rev(node)
            elif changeid == '.':
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                node = self.dirstate.p1()
                rev = self.unfiltered().changelog.rev(node)
            elif len(changeid) == 20:
                # binary node
                try:
                    node = changeid
                    rev = self.changelog.rev(changeid)
                except error.FilteredLookupError:
                    changeid = hex(changeid) # for the error message
                    raise
                except LookupError:
                    # check if it might have come from damaged dirstate
                    #
                    # XXX we could avoid the unfiltered if we had a recognizable
                    # exception for filtered changeset access
                    if (self.local()
                        and changeid in self.unfiltered().dirstate.parents()):
                        msg = _("working directory has unknown parent '%s'!")
                        raise error.Abort(msg % short(changeid))
                    changeid = hex(changeid) # for the error message
                    raise

            elif len(changeid) == 40:
                # hex node
                node = bin(changeid)
                rev = self.changelog.rev(node)
            else:
                raise error.ProgrammingError(
                    "unsupported changeid '%s' of type %s" %
                    (changeid, type(changeid)))

            return context.changectx(self, rev, node)

        except (error.FilteredIndexError, error.FilteredLookupError):
            # revision exists but is hidden by the current filter
            raise error.FilteredRepoLookupError(_("filtered revision '%s'")
                                                % pycompat.bytestr(changeid))
        except (IndexError, LookupError):
            raise error.RepoLookupError(
                _("unknown revision '%s'") % pycompat.bytestr(changeid))
        except error.WdirUnsupported:
            # caller asked for the working directory pseudo-revision
            return context.workingctx(self)
1363
1363
1364 def __contains__(self, changeid):
1364 def __contains__(self, changeid):
1365 """True if the given changeid exists
1365 """True if the given changeid exists
1366
1366
1367 error.AmbiguousPrefixLookupError is raised if an ambiguous node
1367 error.AmbiguousPrefixLookupError is raised if an ambiguous node
1368 specified.
1368 specified.
1369 """
1369 """
1370 try:
1370 try:
1371 self[changeid]
1371 self[changeid]
1372 return True
1372 return True
1373 except error.RepoLookupError:
1373 except error.RepoLookupError:
1374 return False
1374 return False
1375
1375
    def __nonzero__(self):
        # A repository object is always truthy, even when it contains no
        # revisions (len() == 0 must not make it falsy).
        return True

    # Python 3 spelling of the truthiness hook.
    __bool__ = __nonzero__
1380
1380
1381 def __len__(self):
1381 def __len__(self):
1382 # no need to pay the cost of repoview.changelog
1382 # no need to pay the cost of repoview.changelog
1383 unfi = self.unfiltered()
1383 unfi = self.unfiltered()
1384 return len(unfi.changelog)
1384 return len(unfi.changelog)
1385
1385
1386 def __iter__(self):
1386 def __iter__(self):
1387 return iter(self.changelog)
1387 return iter(self.changelog)
1388
1388
1389 def revs(self, expr, *args):
1389 def revs(self, expr, *args):
1390 '''Find revisions matching a revset.
1390 '''Find revisions matching a revset.
1391
1391
1392 The revset is specified as a string ``expr`` that may contain
1392 The revset is specified as a string ``expr`` that may contain
1393 %-formatting to escape certain types. See ``revsetlang.formatspec``.
1393 %-formatting to escape certain types. See ``revsetlang.formatspec``.
1394
1394
1395 Revset aliases from the configuration are not expanded. To expand
1395 Revset aliases from the configuration are not expanded. To expand
1396 user aliases, consider calling ``scmutil.revrange()`` or
1396 user aliases, consider calling ``scmutil.revrange()`` or
1397 ``repo.anyrevs([expr], user=True)``.
1397 ``repo.anyrevs([expr], user=True)``.
1398
1398
1399 Returns a revset.abstractsmartset, which is a list-like interface
1399 Returns a revset.abstractsmartset, which is a list-like interface
1400 that contains integer revisions.
1400 that contains integer revisions.
1401 '''
1401 '''
1402 tree = revsetlang.spectree(expr, *args)
1402 tree = revsetlang.spectree(expr, *args)
1403 return revset.makematcher(tree)(self)
1403 return revset.makematcher(tree)(self)
1404
1404
1405 def set(self, expr, *args):
1405 def set(self, expr, *args):
1406 '''Find revisions matching a revset and emit changectx instances.
1406 '''Find revisions matching a revset and emit changectx instances.
1407
1407
1408 This is a convenience wrapper around ``revs()`` that iterates the
1408 This is a convenience wrapper around ``revs()`` that iterates the
1409 result and is a generator of changectx instances.
1409 result and is a generator of changectx instances.
1410
1410
1411 Revset aliases from the configuration are not expanded. To expand
1411 Revset aliases from the configuration are not expanded. To expand
1412 user aliases, consider calling ``scmutil.revrange()``.
1412 user aliases, consider calling ``scmutil.revrange()``.
1413 '''
1413 '''
1414 for r in self.revs(expr, *args):
1414 for r in self.revs(expr, *args):
1415 yield self[r]
1415 yield self[r]
1416
1416
1417 def anyrevs(self, specs, user=False, localalias=None):
1417 def anyrevs(self, specs, user=False, localalias=None):
1418 '''Find revisions matching one of the given revsets.
1418 '''Find revisions matching one of the given revsets.
1419
1419
1420 Revset aliases from the configuration are not expanded by default. To
1420 Revset aliases from the configuration are not expanded by default. To
1421 expand user aliases, specify ``user=True``. To provide some local
1421 expand user aliases, specify ``user=True``. To provide some local
1422 definitions overriding user aliases, set ``localalias`` to
1422 definitions overriding user aliases, set ``localalias`` to
1423 ``{name: definitionstring}``.
1423 ``{name: definitionstring}``.
1424 '''
1424 '''
1425 if user:
1425 if user:
1426 m = revset.matchany(self.ui, specs,
1426 m = revset.matchany(self.ui, specs,
1427 lookup=revset.lookupfn(self),
1427 lookup=revset.lookupfn(self),
1428 localalias=localalias)
1428 localalias=localalias)
1429 else:
1429 else:
1430 m = revset.matchany(None, specs, localalias=localalias)
1430 m = revset.matchany(None, specs, localalias=localalias)
1431 return m(self)
1431 return m(self)
1432
1432
1433 def url(self):
1433 def url(self):
1434 return 'file:' + self.root
1434 return 'file:' + self.root
1435
1435
1436 def hook(self, name, throw=False, **args):
1436 def hook(self, name, throw=False, **args):
1437 """Call a hook, passing this repo instance.
1437 """Call a hook, passing this repo instance.
1438
1438
1439 This a convenience method to aid invoking hooks. Extensions likely
1439 This a convenience method to aid invoking hooks. Extensions likely
1440 won't call this unless they have registered a custom hook or are
1440 won't call this unless they have registered a custom hook or are
1441 replacing code that is expected to call a hook.
1441 replacing code that is expected to call a hook.
1442 """
1442 """
1443 return hook.hook(self.ui, self, name, throw, **args)
1443 return hook.hook(self.ui, self, name, throw, **args)
1444
1444
    @filteredpropertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related
        caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                # Derived caches, filled lazily by tagslist()/nodetags().
                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        # Populate the primary caches eagerly; the derived ones stay None
        # until first use.
        cache.tags, cache.tagtypes = self._findtags()

        return cache
1467
1467
    def tags(self):
        '''return a mapping of tag to node'''
        t = {}
        if self.changelog.filteredrevs:
            # The property cache is keyed per-filter elsewhere; when revs
            # are filtered, recompute rather than trusting the cache.
            tags, tt = self._findtags()
        else:
            tags = self._tagscache.tags
        rev = self.changelog.rev
        for k, v in tags.iteritems():
            try:
                # ignore tags to unknown nodes
                rev(v)
                t[k] = v
            except (error.LookupError, ValueError):
                pass
        return t
1484
1484
    def _findtags(self):
        '''Do the hard work of finding tags. Return a pair of dicts
        (tags, tagtypes) where tags maps tag name to node, and tagtypes
        maps tag name to a string like \'global\' or \'local\'.
        Subclasses or extensions are free to add their own tags, but
        should be aware that the returned dicts will be retained for the
        duration of the localrepo object.'''

        # XXX what tagtype should subclasses/extensions use? Currently
        # mq and bookmarks add tags, but do not set the tagtype at all.
        # Should each extension invent its own tag type? Should there
        # be one tagtype for all such "virtual" tags? Or is the status
        # quo fine?


        # map tag name to (node, hist)
        alltags = tagsmod.findglobaltags(self.ui, self)
        # map tag name to tag type
        tagtypes = dict((tag, 'global') for tag in alltags)

        # local tags override/extend the global ones in place
        tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)

        # Build the return dicts. Have to re-encode tag names because
        # the tags module always uses UTF-8 (in order not to lose info
        # writing to the cache), but the rest of Mercurial wants them in
        # local encoding.
        tags = {}
        for (name, (node, hist)) in alltags.iteritems():
            # nullid means the tag was deleted; drop it from the result
            if node != nullid:
                tags[encoding.tolocal(name)] = node
        # 'tip' is implicit and always present
        tags['tip'] = self.changelog.tip()
        tagtypes = dict([(encoding.tolocal(name), value)
                         for (name, value) in tagtypes.iteritems()])
        return (tags, tagtypes)
1519
1519
1520 def tagtype(self, tagname):
1520 def tagtype(self, tagname):
1521 '''
1521 '''
1522 return the type of the given tag. result can be:
1522 return the type of the given tag. result can be:
1523
1523
1524 'local' : a local tag
1524 'local' : a local tag
1525 'global' : a global tag
1525 'global' : a global tag
1526 None : tag does not exist
1526 None : tag does not exist
1527 '''
1527 '''
1528
1528
1529 return self._tagscache.tagtypes.get(tagname)
1529 return self._tagscache.tagtypes.get(tagname)
1530
1530
1531 def tagslist(self):
1531 def tagslist(self):
1532 '''return a list of tags ordered by revision'''
1532 '''return a list of tags ordered by revision'''
1533 if not self._tagscache.tagslist:
1533 if not self._tagscache.tagslist:
1534 l = []
1534 l = []
1535 for t, n in self.tags().iteritems():
1535 for t, n in self.tags().iteritems():
1536 l.append((self.changelog.rev(n), t, n))
1536 l.append((self.changelog.rev(n), t, n))
1537 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
1537 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
1538
1538
1539 return self._tagscache.tagslist
1539 return self._tagscache.tagslist
1540
1540
1541 def nodetags(self, node):
1541 def nodetags(self, node):
1542 '''return the tags associated with a node'''
1542 '''return the tags associated with a node'''
1543 if not self._tagscache.nodetagscache:
1543 if not self._tagscache.nodetagscache:
1544 nodetagscache = {}
1544 nodetagscache = {}
1545 for t, n in self._tagscache.tags.iteritems():
1545 for t, n in self._tagscache.tags.iteritems():
1546 nodetagscache.setdefault(n, []).append(t)
1546 nodetagscache.setdefault(n, []).append(t)
1547 for tags in nodetagscache.itervalues():
1547 for tags in nodetagscache.itervalues():
1548 tags.sort()
1548 tags.sort()
1549 self._tagscache.nodetagscache = nodetagscache
1549 self._tagscache.nodetagscache = nodetagscache
1550 return self._tagscache.nodetagscache.get(node, [])
1550 return self._tagscache.nodetagscache.get(node, [])
1551
1551
1552 def nodebookmarks(self, node):
1552 def nodebookmarks(self, node):
1553 """return the list of bookmarks pointing to the specified node"""
1553 """return the list of bookmarks pointing to the specified node"""
1554 return self._bookmarks.names(node)
1554 return self._bookmarks.names(node)
1555
1555
1556 def branchmap(self):
1556 def branchmap(self):
1557 '''returns a dictionary {branch: [branchheads]} with branchheads
1557 '''returns a dictionary {branch: [branchheads]} with branchheads
1558 ordered by increasing revision number'''
1558 ordered by increasing revision number'''
1559 return self._branchcaches[self]
1559 return self._branchcaches[self]
1560
1560
1561 @unfilteredmethod
1561 @unfilteredmethod
1562 def revbranchcache(self):
1562 def revbranchcache(self):
1563 if not self._revbranchcache:
1563 if not self._revbranchcache:
1564 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
1564 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
1565 return self._revbranchcache
1565 return self._revbranchcache
1566
1566
1567 def branchtip(self, branch, ignoremissing=False):
1567 def branchtip(self, branch, ignoremissing=False):
1568 '''return the tip node for a given branch
1568 '''return the tip node for a given branch
1569
1569
1570 If ignoremissing is True, then this method will not raise an error.
1570 If ignoremissing is True, then this method will not raise an error.
1571 This is helpful for callers that only expect None for a missing branch
1571 This is helpful for callers that only expect None for a missing branch
1572 (e.g. namespace).
1572 (e.g. namespace).
1573
1573
1574 '''
1574 '''
1575 try:
1575 try:
1576 return self.branchmap().branchtip(branch)
1576 return self.branchmap().branchtip(branch)
1577 except KeyError:
1577 except KeyError:
1578 if not ignoremissing:
1578 if not ignoremissing:
1579 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
1579 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
1580 else:
1580 else:
1581 pass
1581 pass
1582
1582
1583 def lookup(self, key):
1583 def lookup(self, key):
1584 node = scmutil.revsymbol(self, key).node()
1584 node = scmutil.revsymbol(self, key).node()
1585 if node is None:
1585 if node is None:
1586 raise error.RepoLookupError(_("unknown revision '%s'") % key)
1586 raise error.RepoLookupError(_("unknown revision '%s'") % key)
1587 return node
1587 return node
1588
1588
1589 def lookupbranch(self, key):
1589 def lookupbranch(self, key):
1590 if self.branchmap().hasbranch(key):
1590 if self.branchmap().hasbranch(key):
1591 return key
1591 return key
1592
1592
1593 return scmutil.revsymbol(self, key).branch()
1593 return scmutil.revsymbol(self, key).branch()
1594
1594
1595 def known(self, nodes):
1595 def known(self, nodes):
1596 cl = self.changelog
1596 cl = self.changelog
1597 nm = cl.nodemap
1597 nm = cl.nodemap
1598 filtered = cl.filteredrevs
1598 filtered = cl.filteredrevs
1599 result = []
1599 result = []
1600 for n in nodes:
1600 for n in nodes:
1601 r = nm.get(n)
1601 r = nm.get(n)
1602 resp = not (r is None or r in filtered)
1602 resp = not (r is None or r in filtered)
1603 result.append(resp)
1603 result.append(resp)
1604 return result
1604 return result
1605
1605
    def local(self):
        # This is a local repository, so return self (remote peers
        # return a falsy value here).
        return self
1608
1608
1609 def publishing(self):
1609 def publishing(self):
1610 # it's safe (and desirable) to trust the publish flag unconditionally
1610 # it's safe (and desirable) to trust the publish flag unconditionally
1611 # so that we don't finalize changes shared between users via ssh or nfs
1611 # so that we don't finalize changes shared between users via ssh or nfs
1612 return self.ui.configbool('phases', 'publish', untrusted=True)
1612 return self.ui.configbool('phases', 'publish', untrusted=True)
1613
1613
1614 def cancopy(self):
1614 def cancopy(self):
1615 # so statichttprepo's override of local() works
1615 # so statichttprepo's override of local() works
1616 if not self.local():
1616 if not self.local():
1617 return False
1617 return False
1618 if not self.publishing():
1618 if not self.publishing():
1619 return True
1619 return True
1620 # if publishing we can't copy if there is filtered content
1620 # if publishing we can't copy if there is filtered content
1621 return not self.filtered('visible').changelog.filteredrevs
1621 return not self.filtered('visible').changelog.filteredrevs
1622
1622
1623 def shared(self):
1623 def shared(self):
1624 '''the type of shared repository (None if not shared)'''
1624 '''the type of shared repository (None if not shared)'''
1625 if self.sharedpath != self.path:
1625 if self.sharedpath != self.path:
1626 return 'store'
1626 return 'store'
1627 return None
1627 return None
1628
1628
1629 def wjoin(self, f, *insidef):
1629 def wjoin(self, f, *insidef):
1630 return self.vfs.reljoin(self.root, f, *insidef)
1630 return self.vfs.reljoin(self.root, f, *insidef)
1631
1631
    def setparents(self, p1, p2=nullid):
        """Set the working directory parents, fixing up dirstate copies.

        ``p1``/``p2`` are changeset nodes; ``p2`` defaults to nullid for a
        single-parent state.
        """
        with self.dirstate.parentchange():
            copies = self.dirstate.setparents(p1, p2)
            pctx = self[p1]
            if copies:
                # Adjust copy records, the dirstate cannot do it, it
                # requires access to parents manifests. Preserve them
                # only for entries added to first parent.
                for f in copies:
                    if f not in pctx and copies[f] in pctx:
                        self.dirstate.copy(copies[f], f)
            if p2 == nullid:
                # back to a single parent: drop copy records whose source
                # and destination are both unknown to p1
                for f, s in sorted(self.dirstate.copies().items()):
                    if f not in pctx and s not in pctx:
                        self.dirstate.copy(None, f)
1647
1647
    def filectx(self, path, changeid=None, fileid=None, changectx=None):
        """Return a filectx for *path*.

        changeid must be a changeset revision, if specified.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid,
                               changectx=changectx)
1653
1653
    def getcwd(self):
        """Return the current working directory, as seen by the dirstate."""
        return self.dirstate.getcwd()
1656
1656
    def pathto(self, f, cwd=None):
        """Return repo-relative path *f* expressed relative to *cwd*."""
        return self.dirstate.pathto(f, cwd)
1659
1659
    def _loadfilter(self, filter):
        """Load and cache the filter patterns for config section *filter*.

        Returns a list of ``(matcher, filterfn, params)`` tuples built
        from the ``[encode]``/``[decode]`` style config entries.  Entries
        whose command is ``'!'`` are skipped.  The result is memoized in
        ``self._filterpats``.
        """
        if filter not in self._filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                # a command prefixed by a registered data-filter name uses
                # that in-process filter; the remainder becomes its params
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # fall back to running the command as an external filter
                    fn = lambda s, c, **kwargs: procutil.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not pycompat.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self._filterpats[filter] = l
        return self._filterpats[filter]
1683
1683
1684 def _filter(self, filterpats, filename, data):
1684 def _filter(self, filterpats, filename, data):
1685 for mf, fn, cmd in filterpats:
1685 for mf, fn, cmd in filterpats:
1686 if mf(filename):
1686 if mf(filename):
1687 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1687 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1688 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1688 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1689 break
1689 break
1690
1690
1691 return data
1691 return data
1692
1692
    @unfilteredpropertycache
    def _encodefilterpats(self):
        # cached [encode] filter patterns, used by wread()
        return self._loadfilter('encode')
1696
1696
    @unfilteredpropertycache
    def _decodefilterpats(self):
        # cached [decode] filter patterns, used by wwrite()/wwritedata()
        return self._loadfilter('decode')
1700
1700
    def adddatafilter(self, name, filter):
        """Register an in-process data filter under *name*.

        Registered filters are matched by command prefix in _loadfilter().
        """
        self._datafilters[name] = filter
1703
1703
    def wread(self, filename):
        """Read *filename* from the working directory, applying encode filters.

        Symlinks yield their target path rather than the linked file's
        content.
        """
        if self.wvfs.islink(filename):
            data = self.wvfs.readlink(filename)
        else:
            data = self.wvfs.read(filename)
        return self._filter(self._encodefilterpats, filename, data)
1710
1710
    def wwrite(self, filename, data, flags, backgroundclose=False, **kwargs):
        """write ``data`` into ``filename`` in the working directory

        ``data`` is passed through the decode filters first.  ``flags``
        may contain 'l' (write a symlink whose target is the data) or
        'x' (set the executable bit).

        This returns length of written (maybe decoded) data.
        """
        data = self._filter(self._decodefilterpats, filename, data)
        if 'l' in flags:
            self.wvfs.symlink(data, filename)
        else:
            self.wvfs.write(filename, data, backgroundclose=backgroundclose,
                            **kwargs)
            if 'x' in flags:
                self.wvfs.setflags(filename, False, True)
            else:
                # explicitly clear the executable bit for regular files
                self.wvfs.setflags(filename, False, False)
        return len(data)
1727
1727
    def wwritedata(self, filename, data):
        """Return *data* run through the decode filters, without writing it."""
        return self._filter(self._decodefilterpats, filename, data)
1730
1730
1731 def currenttransaction(self):
1731 def currenttransaction(self):
1732 """return the current transaction or None if non exists"""
1732 """return the current transaction or None if non exists"""
1733 if self._transref:
1733 if self._transref:
1734 tr = self._transref()
1734 tr = self._transref()
1735 else:
1735 else:
1736 tr = None
1736 tr = None
1737
1737
1738 if tr and tr.running():
1738 if tr and tr.running():
1739 return tr
1739 return tr
1740 return None
1740 return None
1741
1741
    def transaction(self, desc, report=None):
        """Open (or nest into) a transaction named *desc*.

        Requires the store lock to be held when devel lock checking is
        enabled.  If a transaction is already running, a nested
        transaction is returned instead of a new one.  *report*, when
        given, replaces ``ui.warn`` as the rollback-report callback.
        Returns the transaction object.
        """
        if (self.ui.configbool('devel', 'all-warnings')
            or self.ui.configbool('devel', 'check-locks')):
            if self._currentlock(self._lockref) is None:
                raise error.ProgrammingError('transaction requires locking')
        tr = self.currenttransaction()
        if tr is not None:
            return tr.nest(name=desc)

        # abort here if the journal already exists
        if self.svfs.exists("journal"):
            raise error.RepoError(
                _("abandoned transaction found"),
                hint=_("run 'hg recover' to clean up transaction"))

        # build a unique-ish transaction id for hooks
        idbase = "%.40f#%f" % (random.random(), time.time())
        ha = hex(hashlib.sha1(idbase).digest())
        txnid = 'TXN:' + ha
        self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)

        self._writejournal(desc)
        renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
        if report:
            rp = report
        else:
            rp = self.ui.warn
        vfsmap = {'plain': self.vfs, 'store': self.svfs} # root of .hg/
        # we must avoid cyclic reference between repo and transaction.
        reporef = weakref.ref(self)
        # Code to track tag movement
        #
        # Since tags are all handled as file content, it is actually quite hard
        # to track these movement from a code perspective. So we fallback to a
        # tracking at the repository level. One could envision to track changes
        # to the '.hgtags' file through changegroup apply but that fails to
        # cope with case where transaction expose new heads without changegroup
        # being involved (eg: phase movement).
        #
        # For now, We gate the feature behind a flag since this likely comes
        # with performance impacts. The current code run more often than needed
        # and do not use caches as much as it could. The current focus is on
        # the behavior of the feature so we disable it by default. The flag
        # will be removed when we are happy with the performance impact.
        #
        # Once this feature is no longer experimental move the following
        # documentation to the appropriate help section:
        #
        # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
        # tags (new or changed or deleted tags). In addition the details of
        # these changes are made available in a file at:
        #     ``REPOROOT/.hg/changes/tags.changes``.
        # Make sure you check for HG_TAG_MOVED before reading that file as it
        # might exist from a previous transaction even if no tag were touched
        # in this one. Changes are recorded in a line base format::
        #
        #   <action> <hex-node> <tag-name>\n
        #
        # Actions are defined as follow:
        #   "-R": tag is removed,
        #   "+A": tag is added,
        #   "-M": tag is moved (old value),
        #   "+M": tag is moved (new value),
        tracktags = lambda x: None
        # experimental config: experimental.hook-track-tags
        shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
        if desc != 'strip' and shouldtracktags:
            oldheads = self.changelog.headrevs()
            def tracktags(tr2):
                repo = reporef()
                oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
                newheads = repo.changelog.headrevs()
                newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
                # notes: we compare lists here.
                # As we do it only once buiding set would not be cheaper
                changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
                if changes:
                    tr2.hookargs['tag_moved'] = '1'
                    with repo.vfs('changes/tags.changes', 'w',
                                  atomictemp=True) as changesfile:
                        # note: we do not register the file to the transaction
                        # because we needs it to still exist on the transaction
                        # is close (for txnclose hooks)
                        tagsmod.writediff(changesfile, changes)
        def validate(tr2):
            """will run pre-closing hooks"""
            # XXX the transaction API is a bit lacking here so we take a hacky
            # path for now
            #
            # We cannot add this as a "pending" hooks since the 'tr.hookargs'
            # dict is copied before these run. In addition we needs the data
            # available to in memory hooks too.
            #
            # Moreover, we also need to make sure this runs before txnclose
            # hooks and there is no "pending" mechanism that would execute
            # logic only if hooks are about to run.
            #
            # Fixing this limitation of the transaction is also needed to track
            # other families of changes (bookmarks, phases, obsolescence).
            #
            # This will have to be fixed before we remove the experimental
            # gating.
            tracktags(tr2)
            repo = reporef()
            if repo.ui.configbool('experimental', 'single-head-per-branch'):
                scmutil.enforcesinglehead(repo, tr2, desc)
            if hook.hashook(repo.ui, 'pretxnclose-bookmark'):
                for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
                    args = tr.hookargs.copy()
                    args.update(bookmarks.preparehookargs(name, old, new))
                    repo.hook('pretxnclose-bookmark', throw=True,
                              **pycompat.strkwargs(args))
            if hook.hashook(repo.ui, 'pretxnclose-phase'):
                cl = repo.unfiltered().changelog
                for rev, (old, new) in tr.changes['phases'].items():
                    args = tr.hookargs.copy()
                    node = hex(cl.node(rev))
                    args.update(phases.preparehookargs(node, old, new))
                    repo.hook('pretxnclose-phase', throw=True,
                              **pycompat.strkwargs(args))

            repo.hook('pretxnclose', throw=True,
                      **pycompat.strkwargs(tr.hookargs))
        def releasefn(tr, success):
            # flush pending dirstate changes on success, or restore the
            # pre-transaction state on failure
            repo = reporef()
            if success:
                # this should be explicitly invoked here, because
                # in-memory changes aren't written out at closing
                # transaction, if tr.addfilegenerator (via
                # dirstate.write or so) isn't invoked while
                # transaction running
                repo.dirstate.write(None)
            else:
                # discard all changes (including ones already written
                # out) in this transaction
                narrowspec.restorebackup(self, 'journal.narrowspec')
                narrowspec.restorewcbackup(self, 'journal.narrowspec.dirstate')
                repo.dirstate.restorebackup(None, 'journal.dirstate')

                repo.invalidate(clearfilecache=True)

        tr = transaction.transaction(rp, self.svfs, vfsmap,
                                     "journal",
                                     "undo",
                                     aftertrans(renames),
                                     self.store.createmode,
                                     validator=validate,
                                     releasefn=releasefn,
                                     checkambigfiles=_cachedfiles,
                                     name=desc)
        tr.changes['origrepolen'] = len(self)
        tr.changes['obsmarkers'] = set()
        tr.changes['phases'] = {}
        tr.changes['bookmarks'] = {}

        tr.hookargs['txnid'] = txnid
        tr.hookargs['txnname'] = desc
        # note: writing the fncache only during finalize mean that the file is
        # outdated when running hooks. As fncache is used for streaming clone,
        # this is not expected to break anything that happen during the hooks.
        tr.addfinalize('flush-fncache', self.store.write)
        def txnclosehook(tr2):
            """To be run if transaction is successful, will schedule a hook run
            """
            # Don't reference tr2 in hook() so we don't hold a reference.
            # This reduces memory consumption when there are multiple
            # transactions per lock. This can likely go away if issue5045
            # fixes the function accumulation.
            hookargs = tr2.hookargs

            def hookfunc():
                repo = reporef()
                if hook.hashook(repo.ui, 'txnclose-bookmark'):
                    bmchanges = sorted(tr.changes['bookmarks'].items())
                    for name, (old, new) in bmchanges:
                        args = tr.hookargs.copy()
                        args.update(bookmarks.preparehookargs(name, old, new))
                        repo.hook('txnclose-bookmark', throw=False,
                                  **pycompat.strkwargs(args))

                if hook.hashook(repo.ui, 'txnclose-phase'):
                    cl = repo.unfiltered().changelog
                    phasemv = sorted(tr.changes['phases'].items())
                    for rev, (old, new) in phasemv:
                        args = tr.hookargs.copy()
                        node = hex(cl.node(rev))
                        args.update(phases.preparehookargs(node, old, new))
                        repo.hook('txnclose-phase', throw=False,
                                  **pycompat.strkwargs(args))

                repo.hook('txnclose', throw=False,
                          **pycompat.strkwargs(hookargs))
            reporef()._afterlock(hookfunc)
        tr.addfinalize('txnclose-hook', txnclosehook)
        # Include a leading "-" to make it happen before the transaction summary
        # reports registered via scmutil.registersummarycallback() whose names
        # are 00-txnreport etc. That way, the caches will be warm when the
        # callbacks run.
        tr.addpostclose('-warm-cache', self._buildcacheupdater(tr))
        def txnaborthook(tr2):
            """To be run if transaction is aborted
            """
            reporef().hook('txnabort', throw=False,
                           **pycompat.strkwargs(tr2.hookargs))
        tr.addabort('txnabort-hook', txnaborthook)
        # avoid eager cache invalidation. in-memory data should be identical
        # to stored data if transaction has no error.
        tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
        self._transref = weakref.ref(tr)
        scmutil.registersummarycallback(self, tr, desc)
        return tr
1952
1952
1953 def _journalfiles(self):
1953 def _journalfiles(self):
1954 return ((self.svfs, 'journal'),
1954 return ((self.svfs, 'journal'),
1955 (self.svfs, 'journal.narrowspec'),
1955 (self.svfs, 'journal.narrowspec'),
1956 (self.vfs, 'journal.narrowspec.dirstate'),
1956 (self.vfs, 'journal.narrowspec.dirstate'),
1957 (self.vfs, 'journal.dirstate'),
1957 (self.vfs, 'journal.dirstate'),
1958 (self.vfs, 'journal.branch'),
1958 (self.vfs, 'journal.branch'),
1959 (self.vfs, 'journal.desc'),
1959 (self.vfs, 'journal.desc'),
1960 (self.vfs, 'journal.bookmarks'),
1960 (self.vfs, 'journal.bookmarks'),
1961 (self.svfs, 'journal.phaseroots'))
1961 (self.svfs, 'journal.phaseroots'))
1962
1962
    def undofiles(self):
        """Return (vfs, name) pairs for the undo files matching the journal."""
        return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1965
1965
    @unfilteredmethod
    def _writejournal(self, desc):
        """Snapshot pre-transaction state into the journal files.

        Backs up the dirstate, narrowspec, branch, bookmarks and
        phaseroots so an interrupted transaction can be rolled back.
        *desc* and the current repo length are recorded in journal.desc.
        """
        self.dirstate.savebackup(None, 'journal.dirstate')
        narrowspec.savewcbackup(self, 'journal.narrowspec.dirstate')
        narrowspec.savebackup(self, 'journal.narrowspec')
        self.vfs.write("journal.branch",
                       encoding.fromlocal(self.dirstate.branch()))
        self.vfs.write("journal.desc",
                       "%d\n%s\n" % (len(self), desc))
        self.vfs.write("journal.bookmarks",
                       self.vfs.tryread("bookmarks"))
        self.svfs.write("journal.phaseroots",
                        self.svfs.tryread("phaseroots"))
1979
1979
    def recover(self):
        """Roll back an interrupted transaction, if one exists.

        Returns True when a journal was found and rolled back, False
        otherwise.
        """
        with self.lock():
            if self.svfs.exists("journal"):
                self.ui.status(_("rolling back interrupted transaction\n"))
                vfsmap = {'': self.svfs,
                          'plain': self.vfs,}
                transaction.rollback(self.svfs, vfsmap, "journal",
                                     self.ui.warn,
                                     checkambigfiles=_cachedfiles)
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
1994
1994
    def rollback(self, dryrun=False, force=False):
        """Undo the last transaction recorded in the undo files.

        Returns the value of _rollback() (0 on success) or 1 when there
        is no rollback information.  Acquires wlock then lock; a
        dirstateguard protects the dirstate while _rollback runs.
        """
        wlock = lock = dsguard = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if self.svfs.exists("undo"):
                dsguard = dirstateguard.dirstateguard(self, 'rollback')

                return self._rollback(dryrun, force, dsguard)
            else:
                self.ui.warn(_("no rollback information available\n"))
                return 1
        finally:
            release(dsguard, lock, wlock)
2009
2009
    @unfilteredmethod # Until we get smarter cache management
    def _rollback(self, dryrun, force, dsguard):
        """Perform the actual rollback of the last transaction.

        Reads undo.desc to describe what is being undone, refuses (unless
        *force*) to roll back a commit while not checked out on tip, then
        restores the undo files.  When the working directory's parents
        were removed by the rollback, the saved dirstate/branch are
        restored as well.  Returns 0 on success.
        """
        ui = self.ui
        try:
            # undo.desc format: "<oldlen>\n<desc>[\n<detail>]"
            args = self.vfs.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %d'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %d'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise error.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        parents = self.dirstate.parents()
        self.destroying()
        vfsmap = {'plain': self.vfs, '': self.svfs}
        transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
                             checkambigfiles=_cachedfiles)
        if self.vfs.exists('undo.bookmarks'):
            self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
        if self.svfs.exists('undo.phaseroots'):
            self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
        self.invalidate()

        # did the rollback strip one of the working directory's parents?
        parentgone = any(p not in self.changelog.nodemap for p in parents)
        if parentgone:
            # prevent dirstateguard from overwriting already restored one
            dsguard.close()

            narrowspec.restorebackup(self, 'undo.narrowspec')
            narrowspec.restorewcbackup(self, 'undo.narrowspec.dirstate')
            self.dirstate.restorebackup(None, 'undo.dirstate')
            try:
                branch = self.vfs.read('undo.branch')
                self.dirstate.setbranch(encoding.tolocal(branch))
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            parents = tuple([p.rev() for p in self[None].parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
            # discard any in-progress merge state now tied to stripped csets
            mergemod.mergestate.clean(self, self['.'].node())

        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0
2082
2082
2083 def _buildcacheupdater(self, newtransaction):
2083 def _buildcacheupdater(self, newtransaction):
2084 """called during transaction to build the callback updating cache
2084 """called during transaction to build the callback updating cache
2085
2085
2086 Lives on the repository to help extension who might want to augment
2086 Lives on the repository to help extension who might want to augment
2087 this logic. For this purpose, the created transaction is passed to the
2087 this logic. For this purpose, the created transaction is passed to the
2088 method.
2088 method.
2089 """
2089 """
2090 # we must avoid cyclic reference between repo and transaction.
2090 # we must avoid cyclic reference between repo and transaction.
2091 reporef = weakref.ref(self)
2091 reporef = weakref.ref(self)
2092 def updater(tr):
2092 def updater(tr):
2093 repo = reporef()
2093 repo = reporef()
2094 repo.updatecaches(tr)
2094 repo.updatecaches(tr)
2095 return updater
2095 return updater
2096
2096
2097 @unfilteredmethod
2097 @unfilteredmethod
2098 def updatecaches(self, tr=None, full=False):
2098 def updatecaches(self, tr=None, full=False):
2099 """warm appropriate caches
2099 """warm appropriate caches
2100
2100
2101 If this function is called after a transaction closed. The transaction
2101 If this function is called after a transaction closed. The transaction
2102 will be available in the 'tr' argument. This can be used to selectively
2102 will be available in the 'tr' argument. This can be used to selectively
2103 update caches relevant to the changes in that transaction.
2103 update caches relevant to the changes in that transaction.
2104
2104
2105 If 'full' is set, make sure all caches the function knows about have
2105 If 'full' is set, make sure all caches the function knows about have
2106 up-to-date data. Even the ones usually loaded more lazily.
2106 up-to-date data. Even the ones usually loaded more lazily.
2107 """
2107 """
2108 if tr is not None and tr.hookargs.get('source') == 'strip':
2108 if tr is not None and tr.hookargs.get('source') == 'strip':
2109 # During strip, many caches are invalid but
2109 # During strip, many caches are invalid but
2110 # later call to `destroyed` will refresh them.
2110 # later call to `destroyed` will refresh them.
2111 return
2111 return
2112
2112
2113 if tr is None or tr.changes['origrepolen'] < len(self):
2113 if tr is None or tr.changes['origrepolen'] < len(self):
2114 # accessing the 'ser ved' branchmap should refresh all the others,
2114 # accessing the 'ser ved' branchmap should refresh all the others,
2115 self.ui.debug('updating the branch cache\n')
2115 self.ui.debug('updating the branch cache\n')
2116 self.filtered('served').branchmap()
2116 self.filtered('served').branchmap()
2117 self.filtered('served.hidden').branchmap()
2117 self.filtered('served.hidden').branchmap()
2118
2118
2119 if full:
2119 if full:
2120 unfi = self.unfiltered()
2120 unfi = self.unfiltered()
2121 rbc = unfi.revbranchcache()
2121 rbc = unfi.revbranchcache()
2122 for r in unfi.changelog:
2122 for r in unfi.changelog:
2123 rbc.branchinfo(r)
2123 rbc.branchinfo(r)
2124 rbc.write()
2124 rbc.write()
2125
2125
2126 # ensure the working copy parents are in the manifestfulltextcache
2126 # ensure the working copy parents are in the manifestfulltextcache
2127 for ctx in self['.'].parents():
2127 for ctx in self['.'].parents():
2128 ctx.manifest() # accessing the manifest is enough
2128 ctx.manifest() # accessing the manifest is enough
2129
2129
2130 # accessing tags warm the cache
2130 # accessing tags warm the cache
2131 self.tags()
2131 self.tags()
2132 self.filtered('served').tags()
2132 self.filtered('served').tags()
2133
2133
2134 def invalidatecaches(self):
2134 def invalidatecaches(self):
2135
2135
2136 if r'_tagscache' in vars(self):
2136 if r'_tagscache' in vars(self):
2137 # can't use delattr on proxy
2137 # can't use delattr on proxy
2138 del self.__dict__[r'_tagscache']
2138 del self.__dict__[r'_tagscache']
2139
2139
2140 self._branchcaches.clear()
2140 self._branchcaches.clear()
2141 self.invalidatevolatilesets()
2141 self.invalidatevolatilesets()
2142 self._sparsesignaturecache.clear()
2142 self._sparsesignaturecache.clear()
2143
2143
2144 def invalidatevolatilesets(self):
2144 def invalidatevolatilesets(self):
2145 self.filteredrevcache.clear()
2145 self.filteredrevcache.clear()
2146 obsolete.clearobscaches(self)
2146 obsolete.clearobscaches(self)
2147
2147
2148 def invalidatedirstate(self):
2148 def invalidatedirstate(self):
2149 '''Invalidates the dirstate, causing the next call to dirstate
2149 '''Invalidates the dirstate, causing the next call to dirstate
2150 to check if it was modified since the last time it was read,
2150 to check if it was modified since the last time it was read,
2151 rereading it if it has.
2151 rereading it if it has.
2152
2152
2153 This is different to dirstate.invalidate() that it doesn't always
2153 This is different to dirstate.invalidate() that it doesn't always
2154 rereads the dirstate. Use dirstate.invalidate() if you want to
2154 rereads the dirstate. Use dirstate.invalidate() if you want to
2155 explicitly read the dirstate again (i.e. restoring it to a previous
2155 explicitly read the dirstate again (i.e. restoring it to a previous
2156 known good state).'''
2156 known good state).'''
2157 if hasunfilteredcache(self, r'dirstate'):
2157 if hasunfilteredcache(self, r'dirstate'):
2158 for k in self.dirstate._filecache:
2158 for k in self.dirstate._filecache:
2159 try:
2159 try:
2160 delattr(self.dirstate, k)
2160 delattr(self.dirstate, k)
2161 except AttributeError:
2161 except AttributeError:
2162 pass
2162 pass
2163 delattr(self.unfiltered(), r'dirstate')
2163 delattr(self.unfiltered(), r'dirstate')
2164
2164
2165 def invalidate(self, clearfilecache=False):
2165 def invalidate(self, clearfilecache=False):
2166 '''Invalidates both store and non-store parts other than dirstate
2166 '''Invalidates both store and non-store parts other than dirstate
2167
2167
2168 If a transaction is running, invalidation of store is omitted,
2168 If a transaction is running, invalidation of store is omitted,
2169 because discarding in-memory changes might cause inconsistency
2169 because discarding in-memory changes might cause inconsistency
2170 (e.g. incomplete fncache causes unintentional failure, but
2170 (e.g. incomplete fncache causes unintentional failure, but
2171 redundant one doesn't).
2171 redundant one doesn't).
2172 '''
2172 '''
2173 unfiltered = self.unfiltered() # all file caches are stored unfiltered
2173 unfiltered = self.unfiltered() # all file caches are stored unfiltered
2174 for k in list(self._filecache.keys()):
2174 for k in list(self._filecache.keys()):
2175 # dirstate is invalidated separately in invalidatedirstate()
2175 # dirstate is invalidated separately in invalidatedirstate()
2176 if k == 'dirstate':
2176 if k == 'dirstate':
2177 continue
2177 continue
2178 if (k == 'changelog' and
2178 if (k == 'changelog' and
2179 self.currenttransaction() and
2179 self.currenttransaction() and
2180 self.changelog._delayed):
2180 self.changelog._delayed):
2181 # The changelog object may store unwritten revisions. We don't
2181 # The changelog object may store unwritten revisions. We don't
2182 # want to lose them.
2182 # want to lose them.
2183 # TODO: Solve the problem instead of working around it.
2183 # TODO: Solve the problem instead of working around it.
2184 continue
2184 continue
2185
2185
2186 if clearfilecache:
2186 if clearfilecache:
2187 del self._filecache[k]
2187 del self._filecache[k]
2188 try:
2188 try:
2189 delattr(unfiltered, k)
2189 delattr(unfiltered, k)
2190 except AttributeError:
2190 except AttributeError:
2191 pass
2191 pass
2192 self.invalidatecaches()
2192 self.invalidatecaches()
2193 if not self.currenttransaction():
2193 if not self.currenttransaction():
2194 # TODO: Changing contents of store outside transaction
2194 # TODO: Changing contents of store outside transaction
2195 # causes inconsistency. We should make in-memory store
2195 # causes inconsistency. We should make in-memory store
2196 # changes detectable, and abort if changed.
2196 # changes detectable, and abort if changed.
2197 self.store.invalidatecaches()
2197 self.store.invalidatecaches()
2198
2198
2199 def invalidateall(self):
2199 def invalidateall(self):
2200 '''Fully invalidates both store and non-store parts, causing the
2200 '''Fully invalidates both store and non-store parts, causing the
2201 subsequent operation to reread any outside changes.'''
2201 subsequent operation to reread any outside changes.'''
2202 # extension should hook this to invalidate its caches
2202 # extension should hook this to invalidate its caches
2203 self.invalidate()
2203 self.invalidate()
2204 self.invalidatedirstate()
2204 self.invalidatedirstate()
2205
2205
2206 @unfilteredmethod
2206 @unfilteredmethod
2207 def _refreshfilecachestats(self, tr):
2207 def _refreshfilecachestats(self, tr):
2208 """Reload stats of cached files so that they are flagged as valid"""
2208 """Reload stats of cached files so that they are flagged as valid"""
2209 for k, ce in self._filecache.items():
2209 for k, ce in self._filecache.items():
2210 k = pycompat.sysstr(k)
2210 k = pycompat.sysstr(k)
2211 if k == r'dirstate' or k not in self.__dict__:
2211 if k == r'dirstate' or k not in self.__dict__:
2212 continue
2212 continue
2213 ce.refresh()
2213 ce.refresh()
2214
2214
2215 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
2215 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
2216 inheritchecker=None, parentenvvar=None):
2216 inheritchecker=None, parentenvvar=None):
2217 parentlock = None
2217 parentlock = None
2218 # the contents of parentenvvar are used by the underlying lock to
2218 # the contents of parentenvvar are used by the underlying lock to
2219 # determine whether it can be inherited
2219 # determine whether it can be inherited
2220 if parentenvvar is not None:
2220 if parentenvvar is not None:
2221 parentlock = encoding.environ.get(parentenvvar)
2221 parentlock = encoding.environ.get(parentenvvar)
2222
2222
2223 timeout = 0
2223 timeout = 0
2224 warntimeout = 0
2224 warntimeout = 0
2225 if wait:
2225 if wait:
2226 timeout = self.ui.configint("ui", "timeout")
2226 timeout = self.ui.configint("ui", "timeout")
2227 warntimeout = self.ui.configint("ui", "timeout.warn")
2227 warntimeout = self.ui.configint("ui", "timeout.warn")
2228 # internal config: ui.signal-safe-lock
2228 # internal config: ui.signal-safe-lock
2229 signalsafe = self.ui.configbool('ui', 'signal-safe-lock')
2229 signalsafe = self.ui.configbool('ui', 'signal-safe-lock')
2230
2230
2231 l = lockmod.trylock(self.ui, vfs, lockname, timeout, warntimeout,
2231 l = lockmod.trylock(self.ui, vfs, lockname, timeout, warntimeout,
2232 releasefn=releasefn,
2232 releasefn=releasefn,
2233 acquirefn=acquirefn, desc=desc,
2233 acquirefn=acquirefn, desc=desc,
2234 inheritchecker=inheritchecker,
2234 inheritchecker=inheritchecker,
2235 parentlock=parentlock,
2235 parentlock=parentlock,
2236 signalsafe=signalsafe)
2236 signalsafe=signalsafe)
2237 return l
2237 return l
2238
2238
2239 def _afterlock(self, callback):
2239 def _afterlock(self, callback):
2240 """add a callback to be run when the repository is fully unlocked
2240 """add a callback to be run when the repository is fully unlocked
2241
2241
2242 The callback will be executed when the outermost lock is released
2242 The callback will be executed when the outermost lock is released
2243 (with wlock being higher level than 'lock')."""
2243 (with wlock being higher level than 'lock')."""
2244 for ref in (self._wlockref, self._lockref):
2244 for ref in (self._wlockref, self._lockref):
2245 l = ref and ref()
2245 l = ref and ref()
2246 if l and l.held:
2246 if l and l.held:
2247 l.postrelease.append(callback)
2247 l.postrelease.append(callback)
2248 break
2248 break
2249 else: # no lock have been found.
2249 else: # no lock have been found.
2250 callback()
2250 callback()
2251
2251
2252 def lock(self, wait=True):
2252 def lock(self, wait=True):
2253 '''Lock the repository store (.hg/store) and return a weak reference
2253 '''Lock the repository store (.hg/store) and return a weak reference
2254 to the lock. Use this before modifying the store (e.g. committing or
2254 to the lock. Use this before modifying the store (e.g. committing or
2255 stripping). If you are opening a transaction, get a lock as well.)
2255 stripping). If you are opening a transaction, get a lock as well.)
2256
2256
2257 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
2257 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
2258 'wlock' first to avoid a dead-lock hazard.'''
2258 'wlock' first to avoid a dead-lock hazard.'''
2259 l = self._currentlock(self._lockref)
2259 l = self._currentlock(self._lockref)
2260 if l is not None:
2260 if l is not None:
2261 l.lock()
2261 l.lock()
2262 return l
2262 return l
2263
2263
2264 l = self._lock(vfs=self.svfs,
2264 l = self._lock(vfs=self.svfs,
2265 lockname="lock",
2265 lockname="lock",
2266 wait=wait,
2266 wait=wait,
2267 releasefn=None,
2267 releasefn=None,
2268 acquirefn=self.invalidate,
2268 acquirefn=self.invalidate,
2269 desc=_('repository %s') % self.origroot)
2269 desc=_('repository %s') % self.origroot)
2270 self._lockref = weakref.ref(l)
2270 self._lockref = weakref.ref(l)
2271 return l
2271 return l
2272
2272
2273 def _wlockchecktransaction(self):
2273 def _wlockchecktransaction(self):
2274 if self.currenttransaction() is not None:
2274 if self.currenttransaction() is not None:
2275 raise error.LockInheritanceContractViolation(
2275 raise error.LockInheritanceContractViolation(
2276 'wlock cannot be inherited in the middle of a transaction')
2276 'wlock cannot be inherited in the middle of a transaction')
2277
2277
2278 def wlock(self, wait=True):
2278 def wlock(self, wait=True):
2279 '''Lock the non-store parts of the repository (everything under
2279 '''Lock the non-store parts of the repository (everything under
2280 .hg except .hg/store) and return a weak reference to the lock.
2280 .hg except .hg/store) and return a weak reference to the lock.
2281
2281
2282 Use this before modifying files in .hg.
2282 Use this before modifying files in .hg.
2283
2283
2284 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
2284 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
2285 'wlock' first to avoid a dead-lock hazard.'''
2285 'wlock' first to avoid a dead-lock hazard.'''
2286 l = self._wlockref and self._wlockref()
2286 l = self._wlockref and self._wlockref()
2287 if l is not None and l.held:
2287 if l is not None and l.held:
2288 l.lock()
2288 l.lock()
2289 return l
2289 return l
2290
2290
2291 # We do not need to check for non-waiting lock acquisition. Such
2291 # We do not need to check for non-waiting lock acquisition. Such
2292 # acquisition would not cause dead-lock as they would just fail.
2292 # acquisition would not cause dead-lock as they would just fail.
2293 if wait and (self.ui.configbool('devel', 'all-warnings')
2293 if wait and (self.ui.configbool('devel', 'all-warnings')
2294 or self.ui.configbool('devel', 'check-locks')):
2294 or self.ui.configbool('devel', 'check-locks')):
2295 if self._currentlock(self._lockref) is not None:
2295 if self._currentlock(self._lockref) is not None:
2296 self.ui.develwarn('"wlock" acquired after "lock"')
2296 self.ui.develwarn('"wlock" acquired after "lock"')
2297
2297
2298 def unlock():
2298 def unlock():
2299 if self.dirstate.pendingparentchange():
2299 if self.dirstate.pendingparentchange():
2300 self.dirstate.invalidate()
2300 self.dirstate.invalidate()
2301 else:
2301 else:
2302 self.dirstate.write(None)
2302 self.dirstate.write(None)
2303
2303
2304 self._filecache['dirstate'].refresh()
2304 self._filecache['dirstate'].refresh()
2305
2305
2306 l = self._lock(self.vfs, "wlock", wait, unlock,
2306 l = self._lock(self.vfs, "wlock", wait, unlock,
2307 self.invalidatedirstate, _('working directory of %s') %
2307 self.invalidatedirstate, _('working directory of %s') %
2308 self.origroot,
2308 self.origroot,
2309 inheritchecker=self._wlockchecktransaction,
2309 inheritchecker=self._wlockchecktransaction,
2310 parentenvvar='HG_WLOCK_LOCKER')
2310 parentenvvar='HG_WLOCK_LOCKER')
2311 self._wlockref = weakref.ref(l)
2311 self._wlockref = weakref.ref(l)
2312 return l
2312 return l
2313
2313
2314 def _currentlock(self, lockref):
2314 def _currentlock(self, lockref):
2315 """Returns the lock if it's held, or None if it's not."""
2315 """Returns the lock if it's held, or None if it's not."""
2316 if lockref is None:
2316 if lockref is None:
2317 return None
2317 return None
2318 l = lockref()
2318 l = lockref()
2319 if l is None or not l.held:
2319 if l is None or not l.held:
2320 return None
2320 return None
2321 return l
2321 return l
2322
2322
2323 def currentwlock(self):
2323 def currentwlock(self):
2324 """Returns the wlock if it's held, or None if it's not."""
2324 """Returns the wlock if it's held, or None if it's not."""
2325 return self._currentlock(self._wlockref)
2325 return self._currentlock(self._wlockref)
2326
2326
2327 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
2327 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist,
2328 includecopymeta):
2328 """
2329 """
2329 commit an individual file as part of a larger transaction
2330 commit an individual file as part of a larger transaction
2330 """
2331 """
2331
2332
2332 fname = fctx.path()
2333 fname = fctx.path()
2333 fparent1 = manifest1.get(fname, nullid)
2334 fparent1 = manifest1.get(fname, nullid)
2334 fparent2 = manifest2.get(fname, nullid)
2335 fparent2 = manifest2.get(fname, nullid)
2335 if isinstance(fctx, context.filectx):
2336 if isinstance(fctx, context.filectx):
2336 node = fctx.filenode()
2337 node = fctx.filenode()
2337 if node in [fparent1, fparent2]:
2338 if node in [fparent1, fparent2]:
2338 self.ui.debug('reusing %s filelog entry\n' % fname)
2339 self.ui.debug('reusing %s filelog entry\n' % fname)
2339 if manifest1.flags(fname) != fctx.flags():
2340 if manifest1.flags(fname) != fctx.flags():
2340 changelist.append(fname)
2341 changelist.append(fname)
2341 return node
2342 return node
2342
2343
2343 flog = self.file(fname)
2344 flog = self.file(fname)
2344 meta = {}
2345 meta = {}
2345 cfname = fctx.copysource()
2346 cfname = fctx.copysource()
2346 if cfname and cfname != fname:
2347 if cfname and cfname != fname:
2347 # Mark the new revision of this file as a copy of another
2348 # Mark the new revision of this file as a copy of another
2348 # file. This copy data will effectively act as a parent
2349 # file. This copy data will effectively act as a parent
2349 # of this new revision. If this is a merge, the first
2350 # of this new revision. If this is a merge, the first
2350 # parent will be the nullid (meaning "look up the copy data")
2351 # parent will be the nullid (meaning "look up the copy data")
2351 # and the second one will be the other parent. For example:
2352 # and the second one will be the other parent. For example:
2352 #
2353 #
2353 # 0 --- 1 --- 3 rev1 changes file foo
2354 # 0 --- 1 --- 3 rev1 changes file foo
2354 # \ / rev2 renames foo to bar and changes it
2355 # \ / rev2 renames foo to bar and changes it
2355 # \- 2 -/ rev3 should have bar with all changes and
2356 # \- 2 -/ rev3 should have bar with all changes and
2356 # should record that bar descends from
2357 # should record that bar descends from
2357 # bar in rev2 and foo in rev1
2358 # bar in rev2 and foo in rev1
2358 #
2359 #
2359 # this allows this merge to succeed:
2360 # this allows this merge to succeed:
2360 #
2361 #
2361 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
2362 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
2362 # \ / merging rev3 and rev4 should use bar@rev2
2363 # \ / merging rev3 and rev4 should use bar@rev2
2363 # \- 2 --- 4 as the merge base
2364 # \- 2 --- 4 as the merge base
2364 #
2365 #
2365
2366
2366 cnode = manifest1.get(cfname)
2367 cnode = manifest1.get(cfname)
2367 newfparent = fparent2
2368 newfparent = fparent2
2368
2369
2369 if manifest2: # branch merge
2370 if manifest2: # branch merge
2370 if fparent2 == nullid or cnode is None: # copied on remote side
2371 if fparent2 == nullid or cnode is None: # copied on remote side
2371 if cfname in manifest2:
2372 if cfname in manifest2:
2372 cnode = manifest2[cfname]
2373 cnode = manifest2[cfname]
2373 newfparent = fparent1
2374 newfparent = fparent1
2374
2375
2375 # Here, we used to search backwards through history to try to find
2376 # Here, we used to search backwards through history to try to find
2376 # where the file copy came from if the source of a copy was not in
2377 # where the file copy came from if the source of a copy was not in
2377 # the parent directory. However, this doesn't actually make sense to
2378 # the parent directory. However, this doesn't actually make sense to
2378 # do (what does a copy from something not in your working copy even
2379 # do (what does a copy from something not in your working copy even
2379 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
2380 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
2380 # the user that copy information was dropped, so if they didn't
2381 # the user that copy information was dropped, so if they didn't
2381 # expect this outcome it can be fixed, but this is the correct
2382 # expect this outcome it can be fixed, but this is the correct
2382 # behavior in this circumstance.
2383 # behavior in this circumstance.
2383
2384
2384 if cnode:
2385 if cnode:
2385 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(cnode)))
2386 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(cnode)))
2386 meta["copy"] = cfname
2387 if includecopymeta:
2387 meta["copyrev"] = hex(cnode)
2388 meta["copy"] = cfname
2389 meta["copyrev"] = hex(cnode)
2388 fparent1, fparent2 = nullid, newfparent
2390 fparent1, fparent2 = nullid, newfparent
2389 else:
2391 else:
2390 self.ui.warn(_("warning: can't find ancestor for '%s' "
2392 self.ui.warn(_("warning: can't find ancestor for '%s' "
2391 "copied from '%s'!\n") % (fname, cfname))
2393 "copied from '%s'!\n") % (fname, cfname))
2392
2394
2393 elif fparent1 == nullid:
2395 elif fparent1 == nullid:
2394 fparent1, fparent2 = fparent2, nullid
2396 fparent1, fparent2 = fparent2, nullid
2395 elif fparent2 != nullid:
2397 elif fparent2 != nullid:
2396 # is one parent an ancestor of the other?
2398 # is one parent an ancestor of the other?
2397 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
2399 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
2398 if fparent1 in fparentancestors:
2400 if fparent1 in fparentancestors:
2399 fparent1, fparent2 = fparent2, nullid
2401 fparent1, fparent2 = fparent2, nullid
2400 elif fparent2 in fparentancestors:
2402 elif fparent2 in fparentancestors:
2401 fparent2 = nullid
2403 fparent2 = nullid
2402
2404
2403 # is the file changed?
2405 # is the file changed?
2404 text = fctx.data()
2406 text = fctx.data()
2405 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
2407 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
2406 changelist.append(fname)
2408 changelist.append(fname)
2407 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
2409 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
2408 # are just the flags changed during merge?
2410 # are just the flags changed during merge?
2409 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
2411 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
2410 changelist.append(fname)
2412 changelist.append(fname)
2411
2413
2412 return fparent1
2414 return fparent1
2413
2415
2414 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
2416 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
2415 """check for commit arguments that aren't committable"""
2417 """check for commit arguments that aren't committable"""
2416 if match.isexact() or match.prefix():
2418 if match.isexact() or match.prefix():
2417 matched = set(status.modified + status.added + status.removed)
2419 matched = set(status.modified + status.added + status.removed)
2418
2420
2419 for f in match.files():
2421 for f in match.files():
2420 f = self.dirstate.normalize(f)
2422 f = self.dirstate.normalize(f)
2421 if f == '.' or f in matched or f in wctx.substate:
2423 if f == '.' or f in matched or f in wctx.substate:
2422 continue
2424 continue
2423 if f in status.deleted:
2425 if f in status.deleted:
2424 fail(f, _('file not found!'))
2426 fail(f, _('file not found!'))
2425 if f in vdirs: # visited directory
2427 if f in vdirs: # visited directory
2426 d = f + '/'
2428 d = f + '/'
2427 for mf in matched:
2429 for mf in matched:
2428 if mf.startswith(d):
2430 if mf.startswith(d):
2429 break
2431 break
2430 else:
2432 else:
2431 fail(f, _("no match under directory!"))
2433 fail(f, _("no match under directory!"))
2432 elif f not in self.dirstate:
2434 elif f not in self.dirstate:
2433 fail(f, _("file not tracked!"))
2435 fail(f, _("file not tracked!"))
2434
2436
2435 @unfilteredmethod
2437 @unfilteredmethod
2436 def commit(self, text="", user=None, date=None, match=None, force=False,
2438 def commit(self, text="", user=None, date=None, match=None, force=False,
2437 editor=False, extra=None):
2439 editor=False, extra=None):
2438 """Add a new revision to current repository.
2440 """Add a new revision to current repository.
2439
2441
2440 Revision information is gathered from the working directory,
2442 Revision information is gathered from the working directory,
2441 match can be used to filter the committed files. If editor is
2443 match can be used to filter the committed files. If editor is
2442 supplied, it is called to get a commit message.
2444 supplied, it is called to get a commit message.
2443 """
2445 """
2444 if extra is None:
2446 if extra is None:
2445 extra = {}
2447 extra = {}
2446
2448
2447 def fail(f, msg):
2449 def fail(f, msg):
2448 raise error.Abort('%s: %s' % (f, msg))
2450 raise error.Abort('%s: %s' % (f, msg))
2449
2451
2450 if not match:
2452 if not match:
2451 match = matchmod.always()
2453 match = matchmod.always()
2452
2454
2453 if not force:
2455 if not force:
2454 vdirs = []
2456 vdirs = []
2455 match.explicitdir = vdirs.append
2457 match.explicitdir = vdirs.append
2456 match.bad = fail
2458 match.bad = fail
2457
2459
2458 # lock() for recent changelog (see issue4368)
2460 # lock() for recent changelog (see issue4368)
2459 with self.wlock(), self.lock():
2461 with self.wlock(), self.lock():
2460 wctx = self[None]
2462 wctx = self[None]
2461 merge = len(wctx.parents()) > 1
2463 merge = len(wctx.parents()) > 1
2462
2464
2463 if not force and merge and not match.always():
2465 if not force and merge and not match.always():
2464 raise error.Abort(_('cannot partially commit a merge '
2466 raise error.Abort(_('cannot partially commit a merge '
2465 '(do not specify files or patterns)'))
2467 '(do not specify files or patterns)'))
2466
2468
2467 status = self.status(match=match, clean=force)
2469 status = self.status(match=match, clean=force)
2468 if force:
2470 if force:
2469 status.modified.extend(status.clean) # mq may commit clean files
2471 status.modified.extend(status.clean) # mq may commit clean files
2470
2472
2471 # check subrepos
2473 # check subrepos
2472 subs, commitsubs, newstate = subrepoutil.precommit(
2474 subs, commitsubs, newstate = subrepoutil.precommit(
2473 self.ui, wctx, status, match, force=force)
2475 self.ui, wctx, status, match, force=force)
2474
2476
2475 # make sure all explicit patterns are matched
2477 # make sure all explicit patterns are matched
2476 if not force:
2478 if not force:
2477 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
2479 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
2478
2480
2479 cctx = context.workingcommitctx(self, status,
2481 cctx = context.workingcommitctx(self, status,
2480 text, user, date, extra)
2482 text, user, date, extra)
2481
2483
2482 # internal config: ui.allowemptycommit
2484 # internal config: ui.allowemptycommit
2483 allowemptycommit = (wctx.branch() != wctx.p1().branch()
2485 allowemptycommit = (wctx.branch() != wctx.p1().branch()
2484 or extra.get('close') or merge or cctx.files()
2486 or extra.get('close') or merge or cctx.files()
2485 or self.ui.configbool('ui', 'allowemptycommit'))
2487 or self.ui.configbool('ui', 'allowemptycommit'))
2486 if not allowemptycommit:
2488 if not allowemptycommit:
2487 return None
2489 return None
2488
2490
2489 if merge and cctx.deleted():
2491 if merge and cctx.deleted():
2490 raise error.Abort(_("cannot commit merge with missing files"))
2492 raise error.Abort(_("cannot commit merge with missing files"))
2491
2493
2492 ms = mergemod.mergestate.read(self)
2494 ms = mergemod.mergestate.read(self)
2493 mergeutil.checkunresolved(ms)
2495 mergeutil.checkunresolved(ms)
2494
2496
2495 if editor:
2497 if editor:
2496 cctx._text = editor(self, cctx, subs)
2498 cctx._text = editor(self, cctx, subs)
2497 edited = (text != cctx._text)
2499 edited = (text != cctx._text)
2498
2500
2499 # Save commit message in case this transaction gets rolled back
2501 # Save commit message in case this transaction gets rolled back
2500 # (e.g. by a pretxncommit hook). Leave the content alone on
2502 # (e.g. by a pretxncommit hook). Leave the content alone on
2501 # the assumption that the user will use the same editor again.
2503 # the assumption that the user will use the same editor again.
2502 msgfn = self.savecommitmessage(cctx._text)
2504 msgfn = self.savecommitmessage(cctx._text)
2503
2505
2504 # commit subs and write new state
2506 # commit subs and write new state
2505 if subs:
2507 if subs:
2506 uipathfn = scmutil.getuipathfn(self)
2508 uipathfn = scmutil.getuipathfn(self)
2507 for s in sorted(commitsubs):
2509 for s in sorted(commitsubs):
2508 sub = wctx.sub(s)
2510 sub = wctx.sub(s)
2509 self.ui.status(_('committing subrepository %s\n') %
2511 self.ui.status(_('committing subrepository %s\n') %
2510 uipathfn(subrepoutil.subrelpath(sub)))
2512 uipathfn(subrepoutil.subrelpath(sub)))
2511 sr = sub.commit(cctx._text, user, date)
2513 sr = sub.commit(cctx._text, user, date)
2512 newstate[s] = (newstate[s][0], sr)
2514 newstate[s] = (newstate[s][0], sr)
2513 subrepoutil.writestate(self, newstate)
2515 subrepoutil.writestate(self, newstate)
2514
2516
2515 p1, p2 = self.dirstate.parents()
2517 p1, p2 = self.dirstate.parents()
2516 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
2518 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
2517 try:
2519 try:
2518 self.hook("precommit", throw=True, parent1=hookp1,
2520 self.hook("precommit", throw=True, parent1=hookp1,
2519 parent2=hookp2)
2521 parent2=hookp2)
2520 with self.transaction('commit'):
2522 with self.transaction('commit'):
2521 ret = self.commitctx(cctx, True)
2523 ret = self.commitctx(cctx, True)
2522 # update bookmarks, dirstate and mergestate
2524 # update bookmarks, dirstate and mergestate
2523 bookmarks.update(self, [p1, p2], ret)
2525 bookmarks.update(self, [p1, p2], ret)
2524 cctx.markcommitted(ret)
2526 cctx.markcommitted(ret)
2525 ms.reset()
2527 ms.reset()
2526 except: # re-raises
2528 except: # re-raises
2527 if edited:
2529 if edited:
2528 self.ui.write(
2530 self.ui.write(
2529 _('note: commit message saved in %s\n') % msgfn)
2531 _('note: commit message saved in %s\n') % msgfn)
2530 raise
2532 raise
2531
2533
2532 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
2534 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
2533 # hack for command that use a temporary commit (eg: histedit)
2535 # hack for command that use a temporary commit (eg: histedit)
2534 # temporary commit got stripped before hook release
2536 # temporary commit got stripped before hook release
2535 if self.changelog.hasnode(ret):
2537 if self.changelog.hasnode(ret):
2536 self.hook("commit", node=node, parent1=parent1,
2538 self.hook("commit", node=node, parent1=parent1,
2537 parent2=parent2)
2539 parent2=parent2)
2538 self._afterlock(commithook)
2540 self._afterlock(commithook)
2539 return ret
2541 return ret
2540
2542
2541 @unfilteredmethod
2543 @unfilteredmethod
2542 def commitctx(self, ctx, error=False):
2544 def commitctx(self, ctx, error=False):
2543 """Add a new revision to current repository.
2545 """Add a new revision to current repository.
2544 Revision information is passed via the context argument.
2546 Revision information is passed via the context argument.
2545
2547
2546 ctx.files() should list all files involved in this commit, i.e.
2548 ctx.files() should list all files involved in this commit, i.e.
2547 modified/added/removed files. On merge, it may be wider than the
2549 modified/added/removed files. On merge, it may be wider than the
2548 ctx.files() to be committed, since any file nodes derived directly
2550 ctx.files() to be committed, since any file nodes derived directly
2549 from p1 or p2 are excluded from the committed ctx.files().
2551 from p1 or p2 are excluded from the committed ctx.files().
2550 """
2552 """
2551
2553
2552 p1, p2 = ctx.p1(), ctx.p2()
2554 p1, p2 = ctx.p1(), ctx.p2()
2553 user = ctx.user()
2555 user = ctx.user()
2554
2556
2557 writecopiesto = self.ui.config('experimental', 'copies.write-to')
2558 writefilecopymeta = writecopiesto != 'changeset-only'
2559 p1copies, p2copies = None, None
2560 if writecopiesto in ('changeset-only', 'compatibility'):
2561 p1copies = ctx.p1copies()
2562 p2copies = ctx.p2copies()
2555 with self.lock(), self.transaction("commit") as tr:
2563 with self.lock(), self.transaction("commit") as tr:
2556 trp = weakref.proxy(tr)
2564 trp = weakref.proxy(tr)
2557
2565
2558 if ctx.manifestnode():
2566 if ctx.manifestnode():
2559 # reuse an existing manifest revision
2567 # reuse an existing manifest revision
2560 self.ui.debug('reusing known manifest\n')
2568 self.ui.debug('reusing known manifest\n')
2561 mn = ctx.manifestnode()
2569 mn = ctx.manifestnode()
2562 files = ctx.files()
2570 files = ctx.files()
2563 elif ctx.files():
2571 elif ctx.files():
2564 m1ctx = p1.manifestctx()
2572 m1ctx = p1.manifestctx()
2565 m2ctx = p2.manifestctx()
2573 m2ctx = p2.manifestctx()
2566 mctx = m1ctx.copy()
2574 mctx = m1ctx.copy()
2567
2575
2568 m = mctx.read()
2576 m = mctx.read()
2569 m1 = m1ctx.read()
2577 m1 = m1ctx.read()
2570 m2 = m2ctx.read()
2578 m2 = m2ctx.read()
2571
2579
2572 # check in files
2580 # check in files
2573 added = []
2581 added = []
2574 changed = []
2582 changed = []
2575 removed = list(ctx.removed())
2583 removed = list(ctx.removed())
2576 linkrev = len(self)
2584 linkrev = len(self)
2577 self.ui.note(_("committing files:\n"))
2585 self.ui.note(_("committing files:\n"))
2578 uipathfn = scmutil.getuipathfn(self)
2586 uipathfn = scmutil.getuipathfn(self)
2579 for f in sorted(ctx.modified() + ctx.added()):
2587 for f in sorted(ctx.modified() + ctx.added()):
2580 self.ui.note(uipathfn(f) + "\n")
2588 self.ui.note(uipathfn(f) + "\n")
2581 try:
2589 try:
2582 fctx = ctx[f]
2590 fctx = ctx[f]
2583 if fctx is None:
2591 if fctx is None:
2584 removed.append(f)
2592 removed.append(f)
2585 else:
2593 else:
2586 added.append(f)
2594 added.append(f)
2587 m[f] = self._filecommit(fctx, m1, m2, linkrev,
2595 m[f] = self._filecommit(fctx, m1, m2, linkrev,
2588 trp, changed)
2596 trp, changed,
2597 writefilecopymeta)
2589 m.setflag(f, fctx.flags())
2598 m.setflag(f, fctx.flags())
2590 except OSError:
2599 except OSError:
2591 self.ui.warn(_("trouble committing %s!\n") %
2600 self.ui.warn(_("trouble committing %s!\n") %
2592 uipathfn(f))
2601 uipathfn(f))
2593 raise
2602 raise
2594 except IOError as inst:
2603 except IOError as inst:
2595 errcode = getattr(inst, 'errno', errno.ENOENT)
2604 errcode = getattr(inst, 'errno', errno.ENOENT)
2596 if error or errcode and errcode != errno.ENOENT:
2605 if error or errcode and errcode != errno.ENOENT:
2597 self.ui.warn(_("trouble committing %s!\n") %
2606 self.ui.warn(_("trouble committing %s!\n") %
2598 uipathfn(f))
2607 uipathfn(f))
2599 raise
2608 raise
2600
2609
2601 # update manifest
2610 # update manifest
2602 removed = [f for f in sorted(removed) if f in m1 or f in m2]
2611 removed = [f for f in sorted(removed) if f in m1 or f in m2]
2603 drop = [f for f in removed if f in m]
2612 drop = [f for f in removed if f in m]
2604 for f in drop:
2613 for f in drop:
2605 del m[f]
2614 del m[f]
2606 files = changed + removed
2615 files = changed + removed
2607 md = None
2616 md = None
2608 if not files:
2617 if not files:
2609 # if no "files" actually changed in terms of the changelog,
2618 # if no "files" actually changed in terms of the changelog,
2610 # try hard to detect unmodified manifest entry so that the
2619 # try hard to detect unmodified manifest entry so that the
2611 # exact same commit can be reproduced later on convert.
2620 # exact same commit can be reproduced later on convert.
2612 md = m1.diff(m, scmutil.matchfiles(self, ctx.files()))
2621 md = m1.diff(m, scmutil.matchfiles(self, ctx.files()))
2613 if not files and md:
2622 if not files and md:
2614 self.ui.debug('not reusing manifest (no file change in '
2623 self.ui.debug('not reusing manifest (no file change in '
2615 'changelog, but manifest differs)\n')
2624 'changelog, but manifest differs)\n')
2616 if files or md:
2625 if files or md:
2617 self.ui.note(_("committing manifest\n"))
2626 self.ui.note(_("committing manifest\n"))
2618 # we're using narrowmatch here since it's already applied at
2627 # we're using narrowmatch here since it's already applied at
2619 # other stages (such as dirstate.walk), so we're already
2628 # other stages (such as dirstate.walk), so we're already
2620 # ignoring things outside of narrowspec in most cases. The
2629 # ignoring things outside of narrowspec in most cases. The
2621 # one case where we might have files outside the narrowspec
2630 # one case where we might have files outside the narrowspec
2622 # at this point is merges, and we already error out in the
2631 # at this point is merges, and we already error out in the
2623 # case where the merge has files outside of the narrowspec,
2632 # case where the merge has files outside of the narrowspec,
2624 # so this is safe.
2633 # so this is safe.
2625 mn = mctx.write(trp, linkrev,
2634 mn = mctx.write(trp, linkrev,
2626 p1.manifestnode(), p2.manifestnode(),
2635 p1.manifestnode(), p2.manifestnode(),
2627 added, drop, match=self.narrowmatch())
2636 added, drop, match=self.narrowmatch())
2628 else:
2637 else:
2629 self.ui.debug('reusing manifest form p1 (listed files '
2638 self.ui.debug('reusing manifest form p1 (listed files '
2630 'actually unchanged)\n')
2639 'actually unchanged)\n')
2631 mn = p1.manifestnode()
2640 mn = p1.manifestnode()
2632 else:
2641 else:
2633 self.ui.debug('reusing manifest from p1 (no file change)\n')
2642 self.ui.debug('reusing manifest from p1 (no file change)\n')
2634 mn = p1.manifestnode()
2643 mn = p1.manifestnode()
2635 files = []
2644 files = []
2636
2645
2637 # update changelog
2646 # update changelog
2638 self.ui.note(_("committing changelog\n"))
2647 self.ui.note(_("committing changelog\n"))
2639 self.changelog.delayupdate(tr)
2648 self.changelog.delayupdate(tr)
2640 n = self.changelog.add(mn, files, ctx.description(),
2649 n = self.changelog.add(mn, files, ctx.description(),
2641 trp, p1.node(), p2.node(),
2650 trp, p1.node(), p2.node(),
2642 user, ctx.date(), ctx.extra().copy())
2651 user, ctx.date(), ctx.extra().copy(),
2652 p1copies, p2copies)
2643 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
2653 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
2644 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
2654 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
2645 parent2=xp2)
2655 parent2=xp2)
2646 # set the new commit is proper phase
2656 # set the new commit is proper phase
2647 targetphase = subrepoutil.newcommitphase(self.ui, ctx)
2657 targetphase = subrepoutil.newcommitphase(self.ui, ctx)
2648 if targetphase:
2658 if targetphase:
2649 # retract boundary do not alter parent changeset.
2659 # retract boundary do not alter parent changeset.
2650 # if a parent have higher the resulting phase will
2660 # if a parent have higher the resulting phase will
2651 # be compliant anyway
2661 # be compliant anyway
2652 #
2662 #
2653 # if minimal phase was 0 we don't need to retract anything
2663 # if minimal phase was 0 we don't need to retract anything
2654 phases.registernew(self, tr, targetphase, [n])
2664 phases.registernew(self, tr, targetphase, [n])
2655 return n
2665 return n
2656
2666
2657 @unfilteredmethod
2667 @unfilteredmethod
2658 def destroying(self):
2668 def destroying(self):
2659 '''Inform the repository that nodes are about to be destroyed.
2669 '''Inform the repository that nodes are about to be destroyed.
2660 Intended for use by strip and rollback, so there's a common
2670 Intended for use by strip and rollback, so there's a common
2661 place for anything that has to be done before destroying history.
2671 place for anything that has to be done before destroying history.
2662
2672
2663 This is mostly useful for saving state that is in memory and waiting
2673 This is mostly useful for saving state that is in memory and waiting
2664 to be flushed when the current lock is released. Because a call to
2674 to be flushed when the current lock is released. Because a call to
2665 destroyed is imminent, the repo will be invalidated causing those
2675 destroyed is imminent, the repo will be invalidated causing those
2666 changes to stay in memory (waiting for the next unlock), or vanish
2676 changes to stay in memory (waiting for the next unlock), or vanish
2667 completely.
2677 completely.
2668 '''
2678 '''
2669 # When using the same lock to commit and strip, the phasecache is left
2679 # When using the same lock to commit and strip, the phasecache is left
2670 # dirty after committing. Then when we strip, the repo is invalidated,
2680 # dirty after committing. Then when we strip, the repo is invalidated,
2671 # causing those changes to disappear.
2681 # causing those changes to disappear.
2672 if '_phasecache' in vars(self):
2682 if '_phasecache' in vars(self):
2673 self._phasecache.write()
2683 self._phasecache.write()
2674
2684
2675 @unfilteredmethod
2685 @unfilteredmethod
2676 def destroyed(self):
2686 def destroyed(self):
2677 '''Inform the repository that nodes have been destroyed.
2687 '''Inform the repository that nodes have been destroyed.
2678 Intended for use by strip and rollback, so there's a common
2688 Intended for use by strip and rollback, so there's a common
2679 place for anything that has to be done after destroying history.
2689 place for anything that has to be done after destroying history.
2680 '''
2690 '''
2681 # When one tries to:
2691 # When one tries to:
2682 # 1) destroy nodes thus calling this method (e.g. strip)
2692 # 1) destroy nodes thus calling this method (e.g. strip)
2683 # 2) use phasecache somewhere (e.g. commit)
2693 # 2) use phasecache somewhere (e.g. commit)
2684 #
2694 #
2685 # then 2) will fail because the phasecache contains nodes that were
2695 # then 2) will fail because the phasecache contains nodes that were
2686 # removed. We can either remove phasecache from the filecache,
2696 # removed. We can either remove phasecache from the filecache,
2687 # causing it to reload next time it is accessed, or simply filter
2697 # causing it to reload next time it is accessed, or simply filter
2688 # the removed nodes now and write the updated cache.
2698 # the removed nodes now and write the updated cache.
2689 self._phasecache.filterunknown(self)
2699 self._phasecache.filterunknown(self)
2690 self._phasecache.write()
2700 self._phasecache.write()
2691
2701
2692 # refresh all repository caches
2702 # refresh all repository caches
2693 self.updatecaches()
2703 self.updatecaches()
2694
2704
2695 # Ensure the persistent tag cache is updated. Doing it now
2705 # Ensure the persistent tag cache is updated. Doing it now
2696 # means that the tag cache only has to worry about destroyed
2706 # means that the tag cache only has to worry about destroyed
2697 # heads immediately after a strip/rollback. That in turn
2707 # heads immediately after a strip/rollback. That in turn
2698 # guarantees that "cachetip == currenttip" (comparing both rev
2708 # guarantees that "cachetip == currenttip" (comparing both rev
2699 # and node) always means no nodes have been added or destroyed.
2709 # and node) always means no nodes have been added or destroyed.
2700
2710
2701 # XXX this is suboptimal when qrefresh'ing: we strip the current
2711 # XXX this is suboptimal when qrefresh'ing: we strip the current
2702 # head, refresh the tag cache, then immediately add a new head.
2712 # head, refresh the tag cache, then immediately add a new head.
2703 # But I think doing it this way is necessary for the "instant
2713 # But I think doing it this way is necessary for the "instant
2704 # tag cache retrieval" case to work.
2714 # tag cache retrieval" case to work.
2705 self.invalidate()
2715 self.invalidate()
2706
2716
2707 def status(self, node1='.', node2=None, match=None,
2717 def status(self, node1='.', node2=None, match=None,
2708 ignored=False, clean=False, unknown=False,
2718 ignored=False, clean=False, unknown=False,
2709 listsubrepos=False):
2719 listsubrepos=False):
2710 '''a convenience method that calls node1.status(node2)'''
2720 '''a convenience method that calls node1.status(node2)'''
2711 return self[node1].status(node2, match, ignored, clean, unknown,
2721 return self[node1].status(node2, match, ignored, clean, unknown,
2712 listsubrepos)
2722 listsubrepos)
2713
2723
2714 def addpostdsstatus(self, ps):
2724 def addpostdsstatus(self, ps):
2715 """Add a callback to run within the wlock, at the point at which status
2725 """Add a callback to run within the wlock, at the point at which status
2716 fixups happen.
2726 fixups happen.
2717
2727
2718 On status completion, callback(wctx, status) will be called with the
2728 On status completion, callback(wctx, status) will be called with the
2719 wlock held, unless the dirstate has changed from underneath or the wlock
2729 wlock held, unless the dirstate has changed from underneath or the wlock
2720 couldn't be grabbed.
2730 couldn't be grabbed.
2721
2731
2722 Callbacks should not capture and use a cached copy of the dirstate --
2732 Callbacks should not capture and use a cached copy of the dirstate --
2723 it might change in the meanwhile. Instead, they should access the
2733 it might change in the meanwhile. Instead, they should access the
2724 dirstate via wctx.repo().dirstate.
2734 dirstate via wctx.repo().dirstate.
2725
2735
2726 This list is emptied out after each status run -- extensions should
2736 This list is emptied out after each status run -- extensions should
2727 make sure it adds to this list each time dirstate.status is called.
2737 make sure it adds to this list each time dirstate.status is called.
2728 Extensions should also make sure they don't call this for statuses
2738 Extensions should also make sure they don't call this for statuses
2729 that don't involve the dirstate.
2739 that don't involve the dirstate.
2730 """
2740 """
2731
2741
2732 # The list is located here for uniqueness reasons -- it is actually
2742 # The list is located here for uniqueness reasons -- it is actually
2733 # managed by the workingctx, but that isn't unique per-repo.
2743 # managed by the workingctx, but that isn't unique per-repo.
2734 self._postdsstatus.append(ps)
2744 self._postdsstatus.append(ps)
2735
2745
2736 def postdsstatus(self):
2746 def postdsstatus(self):
2737 """Used by workingctx to get the list of post-dirstate-status hooks."""
2747 """Used by workingctx to get the list of post-dirstate-status hooks."""
2738 return self._postdsstatus
2748 return self._postdsstatus
2739
2749
2740 def clearpostdsstatus(self):
2750 def clearpostdsstatus(self):
2741 """Used by workingctx to clear post-dirstate-status hooks."""
2751 """Used by workingctx to clear post-dirstate-status hooks."""
2742 del self._postdsstatus[:]
2752 del self._postdsstatus[:]
2743
2753
2744 def heads(self, start=None):
2754 def heads(self, start=None):
2745 if start is None:
2755 if start is None:
2746 cl = self.changelog
2756 cl = self.changelog
2747 headrevs = reversed(cl.headrevs())
2757 headrevs = reversed(cl.headrevs())
2748 return [cl.node(rev) for rev in headrevs]
2758 return [cl.node(rev) for rev in headrevs]
2749
2759
2750 heads = self.changelog.heads(start)
2760 heads = self.changelog.heads(start)
2751 # sort the output in rev descending order
2761 # sort the output in rev descending order
2752 return sorted(heads, key=self.changelog.rev, reverse=True)
2762 return sorted(heads, key=self.changelog.rev, reverse=True)
2753
2763
2754 def branchheads(self, branch=None, start=None, closed=False):
2764 def branchheads(self, branch=None, start=None, closed=False):
2755 '''return a (possibly filtered) list of heads for the given branch
2765 '''return a (possibly filtered) list of heads for the given branch
2756
2766
2757 Heads are returned in topological order, from newest to oldest.
2767 Heads are returned in topological order, from newest to oldest.
2758 If branch is None, use the dirstate branch.
2768 If branch is None, use the dirstate branch.
2759 If start is not None, return only heads reachable from start.
2769 If start is not None, return only heads reachable from start.
2760 If closed is True, return heads that are marked as closed as well.
2770 If closed is True, return heads that are marked as closed as well.
2761 '''
2771 '''
2762 if branch is None:
2772 if branch is None:
2763 branch = self[None].branch()
2773 branch = self[None].branch()
2764 branches = self.branchmap()
2774 branches = self.branchmap()
2765 if not branches.hasbranch(branch):
2775 if not branches.hasbranch(branch):
2766 return []
2776 return []
2767 # the cache returns heads ordered lowest to highest
2777 # the cache returns heads ordered lowest to highest
2768 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2778 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2769 if start is not None:
2779 if start is not None:
2770 # filter out the heads that cannot be reached from startrev
2780 # filter out the heads that cannot be reached from startrev
2771 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2781 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2772 bheads = [h for h in bheads if h in fbheads]
2782 bheads = [h for h in bheads if h in fbheads]
2773 return bheads
2783 return bheads
2774
2784
2775 def branches(self, nodes):
2785 def branches(self, nodes):
2776 if not nodes:
2786 if not nodes:
2777 nodes = [self.changelog.tip()]
2787 nodes = [self.changelog.tip()]
2778 b = []
2788 b = []
2779 for n in nodes:
2789 for n in nodes:
2780 t = n
2790 t = n
2781 while True:
2791 while True:
2782 p = self.changelog.parents(n)
2792 p = self.changelog.parents(n)
2783 if p[1] != nullid or p[0] == nullid:
2793 if p[1] != nullid or p[0] == nullid:
2784 b.append((t, n, p[0], p[1]))
2794 b.append((t, n, p[0], p[1]))
2785 break
2795 break
2786 n = p[0]
2796 n = p[0]
2787 return b
2797 return b
2788
2798
2789 def between(self, pairs):
2799 def between(self, pairs):
2790 r = []
2800 r = []
2791
2801
2792 for top, bottom in pairs:
2802 for top, bottom in pairs:
2793 n, l, i = top, [], 0
2803 n, l, i = top, [], 0
2794 f = 1
2804 f = 1
2795
2805
2796 while n != bottom and n != nullid:
2806 while n != bottom and n != nullid:
2797 p = self.changelog.parents(n)[0]
2807 p = self.changelog.parents(n)[0]
2798 if i == f:
2808 if i == f:
2799 l.append(n)
2809 l.append(n)
2800 f = f * 2
2810 f = f * 2
2801 n = p
2811 n = p
2802 i += 1
2812 i += 1
2803
2813
2804 r.append(l)
2814 r.append(l)
2805
2815
2806 return r
2816 return r
2807
2817
2808 def checkpush(self, pushop):
2818 def checkpush(self, pushop):
2809 """Extensions can override this function if additional checks have
2819 """Extensions can override this function if additional checks have
2810 to be performed before pushing, or call it if they override push
2820 to be performed before pushing, or call it if they override push
2811 command.
2821 command.
2812 """
2822 """
2813
2823
2814 @unfilteredpropertycache
2824 @unfilteredpropertycache
2815 def prepushoutgoinghooks(self):
2825 def prepushoutgoinghooks(self):
2816 """Return util.hooks consists of a pushop with repo, remote, outgoing
2826 """Return util.hooks consists of a pushop with repo, remote, outgoing
2817 methods, which are called before pushing changesets.
2827 methods, which are called before pushing changesets.
2818 """
2828 """
2819 return util.hooks()
2829 return util.hooks()
2820
2830
2821 def pushkey(self, namespace, key, old, new):
2831 def pushkey(self, namespace, key, old, new):
2822 try:
2832 try:
2823 tr = self.currenttransaction()
2833 tr = self.currenttransaction()
2824 hookargs = {}
2834 hookargs = {}
2825 if tr is not None:
2835 if tr is not None:
2826 hookargs.update(tr.hookargs)
2836 hookargs.update(tr.hookargs)
2827 hookargs = pycompat.strkwargs(hookargs)
2837 hookargs = pycompat.strkwargs(hookargs)
2828 hookargs[r'namespace'] = namespace
2838 hookargs[r'namespace'] = namespace
2829 hookargs[r'key'] = key
2839 hookargs[r'key'] = key
2830 hookargs[r'old'] = old
2840 hookargs[r'old'] = old
2831 hookargs[r'new'] = new
2841 hookargs[r'new'] = new
2832 self.hook('prepushkey', throw=True, **hookargs)
2842 self.hook('prepushkey', throw=True, **hookargs)
2833 except error.HookAbort as exc:
2843 except error.HookAbort as exc:
2834 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2844 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2835 if exc.hint:
2845 if exc.hint:
2836 self.ui.write_err(_("(%s)\n") % exc.hint)
2846 self.ui.write_err(_("(%s)\n") % exc.hint)
2837 return False
2847 return False
2838 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2848 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2839 ret = pushkey.push(self, namespace, key, old, new)
2849 ret = pushkey.push(self, namespace, key, old, new)
2840 def runhook():
2850 def runhook():
2841 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2851 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2842 ret=ret)
2852 ret=ret)
2843 self._afterlock(runhook)
2853 self._afterlock(runhook)
2844 return ret
2854 return ret
2845
2855
2846 def listkeys(self, namespace):
2856 def listkeys(self, namespace):
2847 self.hook('prelistkeys', throw=True, namespace=namespace)
2857 self.hook('prelistkeys', throw=True, namespace=namespace)
2848 self.ui.debug('listing keys for "%s"\n' % namespace)
2858 self.ui.debug('listing keys for "%s"\n' % namespace)
2849 values = pushkey.list(self, namespace)
2859 values = pushkey.list(self, namespace)
2850 self.hook('listkeys', namespace=namespace, values=values)
2860 self.hook('listkeys', namespace=namespace, values=values)
2851 return values
2861 return values
2852
2862
2853 def debugwireargs(self, one, two, three=None, four=None, five=None):
2863 def debugwireargs(self, one, two, three=None, four=None, five=None):
2854 '''used to test argument passing over the wire'''
2864 '''used to test argument passing over the wire'''
2855 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
2865 return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
2856 pycompat.bytestr(four),
2866 pycompat.bytestr(four),
2857 pycompat.bytestr(five))
2867 pycompat.bytestr(five))
2858
2868
2859 def savecommitmessage(self, text):
2869 def savecommitmessage(self, text):
2860 fp = self.vfs('last-message.txt', 'wb')
2870 fp = self.vfs('last-message.txt', 'wb')
2861 try:
2871 try:
2862 fp.write(text)
2872 fp.write(text)
2863 finally:
2873 finally:
2864 fp.close()
2874 fp.close()
2865 return self.pathto(fp.name[len(self.root) + 1:])
2875 return self.pathto(fp.name[len(self.root) + 1:])
2866
2876
2867 # used to avoid circular references so destructors work
2877 # used to avoid circular references so destructors work
2868 def aftertrans(files):
2878 def aftertrans(files):
2869 renamefiles = [tuple(t) for t in files]
2879 renamefiles = [tuple(t) for t in files]
2870 def a():
2880 def a():
2871 for vfs, src, dest in renamefiles:
2881 for vfs, src, dest in renamefiles:
2872 # if src and dest refer to a same file, vfs.rename is a no-op,
2882 # if src and dest refer to a same file, vfs.rename is a no-op,
2873 # leaving both src and dest on disk. delete dest to make sure
2883 # leaving both src and dest on disk. delete dest to make sure
2874 # the rename couldn't be such a no-op.
2884 # the rename couldn't be such a no-op.
2875 vfs.tryunlink(dest)
2885 vfs.tryunlink(dest)
2876 try:
2886 try:
2877 vfs.rename(src, dest)
2887 vfs.rename(src, dest)
2878 except OSError: # journal file does not yet exist
2888 except OSError: # journal file does not yet exist
2879 pass
2889 pass
2880 return a
2890 return a
2881
2891
2882 def undoname(fn):
2892 def undoname(fn):
2883 base, name = os.path.split(fn)
2893 base, name = os.path.split(fn)
2884 assert name.startswith('journal')
2894 assert name.startswith('journal')
2885 return os.path.join(base, name.replace('journal', 'undo', 1))
2895 return os.path.join(base, name.replace('journal', 'undo', 1))
2886
2896
2887 def instance(ui, path, create, intents=None, createopts=None):
2897 def instance(ui, path, create, intents=None, createopts=None):
2888 localpath = util.urllocalpath(path)
2898 localpath = util.urllocalpath(path)
2889 if create:
2899 if create:
2890 createrepository(ui, localpath, createopts=createopts)
2900 createrepository(ui, localpath, createopts=createopts)
2891
2901
2892 return makelocalrepository(ui, localpath, intents=intents)
2902 return makelocalrepository(ui, localpath, intents=intents)
2893
2903
2894 def islocal(path):
2904 def islocal(path):
2895 return True
2905 return True
2896
2906
2897 def defaultcreateopts(ui, createopts=None):
2907 def defaultcreateopts(ui, createopts=None):
2898 """Populate the default creation options for a repository.
2908 """Populate the default creation options for a repository.
2899
2909
2900 A dictionary of explicitly requested creation options can be passed
2910 A dictionary of explicitly requested creation options can be passed
2901 in. Missing keys will be populated.
2911 in. Missing keys will be populated.
2902 """
2912 """
2903 createopts = dict(createopts or {})
2913 createopts = dict(createopts or {})
2904
2914
2905 if 'backend' not in createopts:
2915 if 'backend' not in createopts:
2906 # experimental config: storage.new-repo-backend
2916 # experimental config: storage.new-repo-backend
2907 createopts['backend'] = ui.config('storage', 'new-repo-backend')
2917 createopts['backend'] = ui.config('storage', 'new-repo-backend')
2908
2918
2909 return createopts
2919 return createopts
2910
2920
2911 def newreporequirements(ui, createopts):
2921 def newreporequirements(ui, createopts):
2912 """Determine the set of requirements for a new local repository.
2922 """Determine the set of requirements for a new local repository.
2913
2923
2914 Extensions can wrap this function to specify custom requirements for
2924 Extensions can wrap this function to specify custom requirements for
2915 new repositories.
2925 new repositories.
2916 """
2926 """
2917 # If the repo is being created from a shared repository, we copy
2927 # If the repo is being created from a shared repository, we copy
2918 # its requirements.
2928 # its requirements.
2919 if 'sharedrepo' in createopts:
2929 if 'sharedrepo' in createopts:
2920 requirements = set(createopts['sharedrepo'].requirements)
2930 requirements = set(createopts['sharedrepo'].requirements)
2921 if createopts.get('sharedrelative'):
2931 if createopts.get('sharedrelative'):
2922 requirements.add('relshared')
2932 requirements.add('relshared')
2923 else:
2933 else:
2924 requirements.add('shared')
2934 requirements.add('shared')
2925
2935
2926 return requirements
2936 return requirements
2927
2937
2928 if 'backend' not in createopts:
2938 if 'backend' not in createopts:
2929 raise error.ProgrammingError('backend key not present in createopts; '
2939 raise error.ProgrammingError('backend key not present in createopts; '
2930 'was defaultcreateopts() called?')
2940 'was defaultcreateopts() called?')
2931
2941
2932 if createopts['backend'] != 'revlogv1':
2942 if createopts['backend'] != 'revlogv1':
2933 raise error.Abort(_('unable to determine repository requirements for '
2943 raise error.Abort(_('unable to determine repository requirements for '
2934 'storage backend: %s') % createopts['backend'])
2944 'storage backend: %s') % createopts['backend'])
2935
2945
2936 requirements = {'revlogv1'}
2946 requirements = {'revlogv1'}
2937 if ui.configbool('format', 'usestore'):
2947 if ui.configbool('format', 'usestore'):
2938 requirements.add('store')
2948 requirements.add('store')
2939 if ui.configbool('format', 'usefncache'):
2949 if ui.configbool('format', 'usefncache'):
2940 requirements.add('fncache')
2950 requirements.add('fncache')
2941 if ui.configbool('format', 'dotencode'):
2951 if ui.configbool('format', 'dotencode'):
2942 requirements.add('dotencode')
2952 requirements.add('dotencode')
2943
2953
2944 compengine = ui.config('format', 'revlog-compression')
2954 compengine = ui.config('format', 'revlog-compression')
2945 if compengine not in util.compengines:
2955 if compengine not in util.compengines:
2946 raise error.Abort(_('compression engine %s defined by '
2956 raise error.Abort(_('compression engine %s defined by '
2947 'format.revlog-compression not available') %
2957 'format.revlog-compression not available') %
2948 compengine,
2958 compengine,
2949 hint=_('run "hg debuginstall" to list available '
2959 hint=_('run "hg debuginstall" to list available '
2950 'compression engines'))
2960 'compression engines'))
2951
2961
2952 # zlib is the historical default and doesn't need an explicit requirement.
2962 # zlib is the historical default and doesn't need an explicit requirement.
2953 elif compengine == 'zstd':
2963 elif compengine == 'zstd':
2954 requirements.add('revlog-compression-zstd')
2964 requirements.add('revlog-compression-zstd')
2955 elif compengine != 'zlib':
2965 elif compengine != 'zlib':
2956 requirements.add('exp-compression-%s' % compengine)
2966 requirements.add('exp-compression-%s' % compengine)
2957
2967
2958 if scmutil.gdinitconfig(ui):
2968 if scmutil.gdinitconfig(ui):
2959 requirements.add('generaldelta')
2969 requirements.add('generaldelta')
2960 if ui.configbool('format', 'sparse-revlog'):
2970 if ui.configbool('format', 'sparse-revlog'):
2961 requirements.add(SPARSEREVLOG_REQUIREMENT)
2971 requirements.add(SPARSEREVLOG_REQUIREMENT)
2962 if ui.configbool('experimental', 'treemanifest'):
2972 if ui.configbool('experimental', 'treemanifest'):
2963 requirements.add('treemanifest')
2973 requirements.add('treemanifest')
2964
2974
2965 revlogv2 = ui.config('experimental', 'revlogv2')
2975 revlogv2 = ui.config('experimental', 'revlogv2')
2966 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2976 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2967 requirements.remove('revlogv1')
2977 requirements.remove('revlogv1')
2968 # generaldelta is implied by revlogv2.
2978 # generaldelta is implied by revlogv2.
2969 requirements.discard('generaldelta')
2979 requirements.discard('generaldelta')
2970 requirements.add(REVLOGV2_REQUIREMENT)
2980 requirements.add(REVLOGV2_REQUIREMENT)
2971 # experimental config: format.internal-phase
2981 # experimental config: format.internal-phase
2972 if ui.configbool('format', 'internal-phase'):
2982 if ui.configbool('format', 'internal-phase'):
2973 requirements.add('internal-phase')
2983 requirements.add('internal-phase')
2974
2984
2975 if createopts.get('narrowfiles'):
2985 if createopts.get('narrowfiles'):
2976 requirements.add(repository.NARROW_REQUIREMENT)
2986 requirements.add(repository.NARROW_REQUIREMENT)
2977
2987
2978 if createopts.get('lfs'):
2988 if createopts.get('lfs'):
2979 requirements.add('lfs')
2989 requirements.add('lfs')
2980
2990
2981 return requirements
2991 return requirements
2982
2992
2983 def filterknowncreateopts(ui, createopts):
2993 def filterknowncreateopts(ui, createopts):
2984 """Filters a dict of repo creation options against options that are known.
2994 """Filters a dict of repo creation options against options that are known.
2985
2995
2986 Receives a dict of repo creation options and returns a dict of those
2996 Receives a dict of repo creation options and returns a dict of those
2987 options that we don't know how to handle.
2997 options that we don't know how to handle.
2988
2998
2989 This function is called as part of repository creation. If the
2999 This function is called as part of repository creation. If the
2990 returned dict contains any items, repository creation will not
3000 returned dict contains any items, repository creation will not
2991 be allowed, as it means there was a request to create a repository
3001 be allowed, as it means there was a request to create a repository
2992 with options not recognized by loaded code.
3002 with options not recognized by loaded code.
2993
3003
2994 Extensions can wrap this function to filter out creation options
3004 Extensions can wrap this function to filter out creation options
2995 they know how to handle.
3005 they know how to handle.
2996 """
3006 """
2997 known = {
3007 known = {
2998 'backend',
3008 'backend',
2999 'lfs',
3009 'lfs',
3000 'narrowfiles',
3010 'narrowfiles',
3001 'sharedrepo',
3011 'sharedrepo',
3002 'sharedrelative',
3012 'sharedrelative',
3003 'shareditems',
3013 'shareditems',
3004 'shallowfilestore',
3014 'shallowfilestore',
3005 }
3015 }
3006
3016
3007 return {k: v for k, v in createopts.items() if k not in known}
3017 return {k: v for k, v in createopts.items() if k not in known}
3008
3018
3009 def createrepository(ui, path, createopts=None):
3019 def createrepository(ui, path, createopts=None):
3010 """Create a new repository in a vfs.
3020 """Create a new repository in a vfs.
3011
3021
3012 ``path`` path to the new repo's working directory.
3022 ``path`` path to the new repo's working directory.
3013 ``createopts`` options for the new repository.
3023 ``createopts`` options for the new repository.
3014
3024
3015 The following keys for ``createopts`` are recognized:
3025 The following keys for ``createopts`` are recognized:
3016
3026
3017 backend
3027 backend
3018 The storage backend to use.
3028 The storage backend to use.
3019 lfs
3029 lfs
3020 Repository will be created with ``lfs`` requirement. The lfs extension
3030 Repository will be created with ``lfs`` requirement. The lfs extension
3021 will automatically be loaded when the repository is accessed.
3031 will automatically be loaded when the repository is accessed.
3022 narrowfiles
3032 narrowfiles
3023 Set up repository to support narrow file storage.
3033 Set up repository to support narrow file storage.
3024 sharedrepo
3034 sharedrepo
3025 Repository object from which storage should be shared.
3035 Repository object from which storage should be shared.
3026 sharedrelative
3036 sharedrelative
3027 Boolean indicating if the path to the shared repo should be
3037 Boolean indicating if the path to the shared repo should be
3028 stored as relative. By default, the pointer to the "parent" repo
3038 stored as relative. By default, the pointer to the "parent" repo
3029 is stored as an absolute path.
3039 is stored as an absolute path.
3030 shareditems
3040 shareditems
3031 Set of items to share to the new repository (in addition to storage).
3041 Set of items to share to the new repository (in addition to storage).
3032 shallowfilestore
3042 shallowfilestore
3033 Indicates that storage for files should be shallow (not all ancestor
3043 Indicates that storage for files should be shallow (not all ancestor
3034 revisions are known).
3044 revisions are known).
3035 """
3045 """
3036 createopts = defaultcreateopts(ui, createopts=createopts)
3046 createopts = defaultcreateopts(ui, createopts=createopts)
3037
3047
3038 unknownopts = filterknowncreateopts(ui, createopts)
3048 unknownopts = filterknowncreateopts(ui, createopts)
3039
3049
3040 if not isinstance(unknownopts, dict):
3050 if not isinstance(unknownopts, dict):
3041 raise error.ProgrammingError('filterknowncreateopts() did not return '
3051 raise error.ProgrammingError('filterknowncreateopts() did not return '
3042 'a dict')
3052 'a dict')
3043
3053
3044 if unknownopts:
3054 if unknownopts:
3045 raise error.Abort(_('unable to create repository because of unknown '
3055 raise error.Abort(_('unable to create repository because of unknown '
3046 'creation option: %s') %
3056 'creation option: %s') %
3047 ', '.join(sorted(unknownopts)),
3057 ', '.join(sorted(unknownopts)),
3048 hint=_('is a required extension not loaded?'))
3058 hint=_('is a required extension not loaded?'))
3049
3059
3050 requirements = newreporequirements(ui, createopts=createopts)
3060 requirements = newreporequirements(ui, createopts=createopts)
3051
3061
3052 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
3062 wdirvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
3053
3063
3054 hgvfs = vfsmod.vfs(wdirvfs.join(b'.hg'))
3064 hgvfs = vfsmod.vfs(wdirvfs.join(b'.hg'))
3055 if hgvfs.exists():
3065 if hgvfs.exists():
3056 raise error.RepoError(_('repository %s already exists') % path)
3066 raise error.RepoError(_('repository %s already exists') % path)
3057
3067
3058 if 'sharedrepo' in createopts:
3068 if 'sharedrepo' in createopts:
3059 sharedpath = createopts['sharedrepo'].sharedpath
3069 sharedpath = createopts['sharedrepo'].sharedpath
3060
3070
3061 if createopts.get('sharedrelative'):
3071 if createopts.get('sharedrelative'):
3062 try:
3072 try:
3063 sharedpath = os.path.relpath(sharedpath, hgvfs.base)
3073 sharedpath = os.path.relpath(sharedpath, hgvfs.base)
3064 except (IOError, ValueError) as e:
3074 except (IOError, ValueError) as e:
3065 # ValueError is raised on Windows if the drive letters differ
3075 # ValueError is raised on Windows if the drive letters differ
3066 # on each path.
3076 # on each path.
3067 raise error.Abort(_('cannot calculate relative path'),
3077 raise error.Abort(_('cannot calculate relative path'),
3068 hint=stringutil.forcebytestr(e))
3078 hint=stringutil.forcebytestr(e))
3069
3079
3070 if not wdirvfs.exists():
3080 if not wdirvfs.exists():
3071 wdirvfs.makedirs()
3081 wdirvfs.makedirs()
3072
3082
3073 hgvfs.makedir(notindexed=True)
3083 hgvfs.makedir(notindexed=True)
3074 if 'sharedrepo' not in createopts:
3084 if 'sharedrepo' not in createopts:
3075 hgvfs.mkdir(b'cache')
3085 hgvfs.mkdir(b'cache')
3076 hgvfs.mkdir(b'wcache')
3086 hgvfs.mkdir(b'wcache')
3077
3087
3078 if b'store' in requirements and 'sharedrepo' not in createopts:
3088 if b'store' in requirements and 'sharedrepo' not in createopts:
3079 hgvfs.mkdir(b'store')
3089 hgvfs.mkdir(b'store')
3080
3090
3081 # We create an invalid changelog outside the store so very old
3091 # We create an invalid changelog outside the store so very old
3082 # Mercurial versions (which didn't know about the requirements
3092 # Mercurial versions (which didn't know about the requirements
3083 # file) encounter an error on reading the changelog. This
3093 # file) encounter an error on reading the changelog. This
3084 # effectively locks out old clients and prevents them from
3094 # effectively locks out old clients and prevents them from
3085 # mucking with a repo in an unknown format.
3095 # mucking with a repo in an unknown format.
3086 #
3096 #
3087 # The revlog header has version 2, which won't be recognized by
3097 # The revlog header has version 2, which won't be recognized by
3088 # such old clients.
3098 # such old clients.
3089 hgvfs.append(b'00changelog.i',
3099 hgvfs.append(b'00changelog.i',
3090 b'\0\0\0\2 dummy changelog to prevent using the old repo '
3100 b'\0\0\0\2 dummy changelog to prevent using the old repo '
3091 b'layout')
3101 b'layout')
3092
3102
3093 scmutil.writerequires(hgvfs, requirements)
3103 scmutil.writerequires(hgvfs, requirements)
3094
3104
3095 # Write out file telling readers where to find the shared store.
3105 # Write out file telling readers where to find the shared store.
3096 if 'sharedrepo' in createopts:
3106 if 'sharedrepo' in createopts:
3097 hgvfs.write(b'sharedpath', sharedpath)
3107 hgvfs.write(b'sharedpath', sharedpath)
3098
3108
3099 if createopts.get('shareditems'):
3109 if createopts.get('shareditems'):
3100 shared = b'\n'.join(sorted(createopts['shareditems'])) + b'\n'
3110 shared = b'\n'.join(sorted(createopts['shareditems'])) + b'\n'
3101 hgvfs.write(b'shared', shared)
3111 hgvfs.write(b'shared', shared)
3102
3112
3103 def poisonrepository(repo):
3113 def poisonrepository(repo):
3104 """Poison a repository instance so it can no longer be used."""
3114 """Poison a repository instance so it can no longer be used."""
3105 # Perform any cleanup on the instance.
3115 # Perform any cleanup on the instance.
3106 repo.close()
3116 repo.close()
3107
3117
3108 # Our strategy is to replace the type of the object with one that
3118 # Our strategy is to replace the type of the object with one that
3109 # has all attribute lookups result in error.
3119 # has all attribute lookups result in error.
3110 #
3120 #
3111 # But we have to allow the close() method because some constructors
3121 # But we have to allow the close() method because some constructors
3112 # of repos call close() on repo references.
3122 # of repos call close() on repo references.
3113 class poisonedrepository(object):
3123 class poisonedrepository(object):
3114 def __getattribute__(self, item):
3124 def __getattribute__(self, item):
3115 if item == r'close':
3125 if item == r'close':
3116 return object.__getattribute__(self, item)
3126 return object.__getattribute__(self, item)
3117
3127
3118 raise error.ProgrammingError('repo instances should not be used '
3128 raise error.ProgrammingError('repo instances should not be used '
3119 'after unshare')
3129 'after unshare')
3120
3130
3121 def close(self):
3131 def close(self):
3122 pass
3132 pass
3123
3133
3124 # We may have a repoview, which intercepts __setattr__. So be sure
3134 # We may have a repoview, which intercepts __setattr__. So be sure
3125 # we operate at the lowest level possible.
3135 # we operate at the lowest level possible.
3126 object.__setattr__(repo, r'__class__', poisonedrepository)
3136 object.__setattr__(repo, r'__class__', poisonedrepository)
@@ -1,1080 +1,1080
1 $ HGMERGE=true; export HGMERGE
1 $ HGMERGE=true; export HGMERGE
2
2
3 init
3 init
4
4
5 $ hg init repo
5 $ hg init repo
6 $ cd repo
6 $ cd repo
7
7
8 commit
8 commit
9
9
10 $ echo 'a' > a
10 $ echo 'a' > a
11 $ hg ci -A -m test -u nobody -d '1 0'
11 $ hg ci -A -m test -u nobody -d '1 0'
12 adding a
12 adding a
13
13
14 annotate -c
14 annotate -c
15
15
16 $ hg annotate -c a
16 $ hg annotate -c a
17 8435f90966e4: a
17 8435f90966e4: a
18
18
19 annotate -cl
19 annotate -cl
20
20
21 $ hg annotate -cl a
21 $ hg annotate -cl a
22 8435f90966e4:1: a
22 8435f90966e4:1: a
23
23
24 annotate -d
24 annotate -d
25
25
26 $ hg annotate -d a
26 $ hg annotate -d a
27 Thu Jan 01 00:00:01 1970 +0000: a
27 Thu Jan 01 00:00:01 1970 +0000: a
28
28
29 annotate -n
29 annotate -n
30
30
31 $ hg annotate -n a
31 $ hg annotate -n a
32 0: a
32 0: a
33
33
34 annotate -nl
34 annotate -nl
35
35
36 $ hg annotate -nl a
36 $ hg annotate -nl a
37 0:1: a
37 0:1: a
38
38
39 annotate -u
39 annotate -u
40
40
41 $ hg annotate -u a
41 $ hg annotate -u a
42 nobody: a
42 nobody: a
43
43
44 annotate -cdnu
44 annotate -cdnu
45
45
46 $ hg annotate -cdnu a
46 $ hg annotate -cdnu a
47 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a
47 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a
48
48
49 annotate -cdnul
49 annotate -cdnul
50
50
51 $ hg annotate -cdnul a
51 $ hg annotate -cdnul a
52 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a
52 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a
53
53
54 annotate (JSON)
54 annotate (JSON)
55
55
56 $ hg annotate -Tjson a
56 $ hg annotate -Tjson a
57 [
57 [
58 {
58 {
59 "lines": [{"line": "a\n", "rev": 0}],
59 "lines": [{"line": "a\n", "rev": 0}],
60 "path": "a"
60 "path": "a"
61 }
61 }
62 ]
62 ]
63
63
64 $ hg annotate -Tjson -cdfnul a
64 $ hg annotate -Tjson -cdfnul a
65 [
65 [
66 {
66 {
67 "lines": [{"date": [1.0, 0], "line": "a\n", "lineno": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "path": "a", "rev": 0, "user": "nobody"}],
67 "lines": [{"date": [1.0, 0], "line": "a\n", "lineno": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "path": "a", "rev": 0, "user": "nobody"}],
68 "path": "a"
68 "path": "a"
69 }
69 }
70 ]
70 ]
71
71
72 log-like templating
72 log-like templating
73
73
74 $ hg annotate -T'{lines % "{rev} {node|shortest}: {line}"}' a
74 $ hg annotate -T'{lines % "{rev} {node|shortest}: {line}"}' a
75 0 8435: a
75 0 8435: a
76
76
77 '{lineno}' field should be populated as necessary
77 '{lineno}' field should be populated as necessary
78
78
79 $ hg annotate -T'{lines % "{rev}:{lineno}: {line}"}' a
79 $ hg annotate -T'{lines % "{rev}:{lineno}: {line}"}' a
80 0:1: a
80 0:1: a
81 $ hg annotate -Ta a \
81 $ hg annotate -Ta a \
82 > --config templates.a='"{lines % "{rev}:{lineno}: {line}"}"'
82 > --config templates.a='"{lines % "{rev}:{lineno}: {line}"}"'
83 0:1: a
83 0:1: a
84
84
85 $ cat <<EOF >>a
85 $ cat <<EOF >>a
86 > a
86 > a
87 > a
87 > a
88 > EOF
88 > EOF
89 $ hg ci -ma1 -d '1 0'
89 $ hg ci -ma1 -d '1 0'
90 $ hg cp a b
90 $ hg cp a b
91 $ hg ci -mb -d '1 0'
91 $ hg ci -mb -d '1 0'
92 $ cat <<EOF >> b
92 $ cat <<EOF >> b
93 > b4
93 > b4
94 > b5
94 > b5
95 > b6
95 > b6
96 > EOF
96 > EOF
97 $ hg ci -mb2 -d '2 0'
97 $ hg ci -mb2 -d '2 0'
98
98
99 default output of '{lines}' should be readable
99 default output of '{lines}' should be readable
100
100
101 $ hg annotate -T'{lines}' a
101 $ hg annotate -T'{lines}' a
102 0: a
102 0: a
103 1: a
103 1: a
104 1: a
104 1: a
105 $ hg annotate -T'{join(lines, "\n")}' a
105 $ hg annotate -T'{join(lines, "\n")}' a
106 0: a
106 0: a
107
107
108 1: a
108 1: a
109
109
110 1: a
110 1: a
111
111
112 several filters can be applied to '{lines}'
112 several filters can be applied to '{lines}'
113
113
114 $ hg annotate -T'{lines|json}\n' a
114 $ hg annotate -T'{lines|json}\n' a
115 [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}]
115 [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}]
116 $ hg annotate -T'{lines|stringify}' a
116 $ hg annotate -T'{lines|stringify}' a
117 0: a
117 0: a
118 1: a
118 1: a
119 1: a
119 1: a
120 $ hg annotate -T'{lines|count}\n' a
120 $ hg annotate -T'{lines|count}\n' a
121 3
121 3
122
122
123 annotate multiple files (JSON)
123 annotate multiple files (JSON)
124
124
125 $ hg annotate -Tjson a b
125 $ hg annotate -Tjson a b
126 [
126 [
127 {
127 {
128 "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}],
128 "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}],
129 "path": "a"
129 "path": "a"
130 },
130 },
131 {
131 {
132 "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}, {"line": "b4\n", "rev": 3}, {"line": "b5\n", "rev": 3}, {"line": "b6\n", "rev": 3}],
132 "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}, {"line": "b4\n", "rev": 3}, {"line": "b5\n", "rev": 3}, {"line": "b6\n", "rev": 3}],
133 "path": "b"
133 "path": "b"
134 }
134 }
135 ]
135 ]
136
136
137 annotate multiple files (template)
137 annotate multiple files (template)
138
138
139 $ hg annotate -T'== {path} ==\n{lines % "{rev}: {line}"}' a b
139 $ hg annotate -T'== {path} ==\n{lines % "{rev}: {line}"}' a b
140 == a ==
140 == a ==
141 0: a
141 0: a
142 1: a
142 1: a
143 1: a
143 1: a
144 == b ==
144 == b ==
145 0: a
145 0: a
146 1: a
146 1: a
147 1: a
147 1: a
148 3: b4
148 3: b4
149 3: b5
149 3: b5
150 3: b6
150 3: b6
151
151
152 annotate -n b
152 annotate -n b
153
153
154 $ hg annotate -n b
154 $ hg annotate -n b
155 0: a
155 0: a
156 1: a
156 1: a
157 1: a
157 1: a
158 3: b4
158 3: b4
159 3: b5
159 3: b5
160 3: b6
160 3: b6
161
161
162 annotate --no-follow b
162 annotate --no-follow b
163
163
164 $ hg annotate --no-follow b
164 $ hg annotate --no-follow b
165 2: a
165 2: a
166 2: a
166 2: a
167 2: a
167 2: a
168 3: b4
168 3: b4
169 3: b5
169 3: b5
170 3: b6
170 3: b6
171
171
172 annotate -nl b
172 annotate -nl b
173
173
174 $ hg annotate -nl b
174 $ hg annotate -nl b
175 0:1: a
175 0:1: a
176 1:2: a
176 1:2: a
177 1:3: a
177 1:3: a
178 3:4: b4
178 3:4: b4
179 3:5: b5
179 3:5: b5
180 3:6: b6
180 3:6: b6
181
181
182 annotate -nf b
182 annotate -nf b
183
183
184 $ hg annotate -nf b
184 $ hg annotate -nf b
185 0 a: a
185 0 a: a
186 1 a: a
186 1 a: a
187 1 a: a
187 1 a: a
188 3 b: b4
188 3 b: b4
189 3 b: b5
189 3 b: b5
190 3 b: b6
190 3 b: b6
191
191
192 annotate -nlf b
192 annotate -nlf b
193
193
194 $ hg annotate -nlf b
194 $ hg annotate -nlf b
195 0 a:1: a
195 0 a:1: a
196 1 a:2: a
196 1 a:2: a
197 1 a:3: a
197 1 a:3: a
198 3 b:4: b4
198 3 b:4: b4
199 3 b:5: b5
199 3 b:5: b5
200 3 b:6: b6
200 3 b:6: b6
201
201
202 $ hg up -C 2
202 $ hg up -C 2
203 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
203 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
204 $ cat <<EOF >> b
204 $ cat <<EOF >> b
205 > b4
205 > b4
206 > c
206 > c
207 > b5
207 > b5
208 > EOF
208 > EOF
209 $ hg ci -mb2.1 -d '2 0'
209 $ hg ci -mb2.1 -d '2 0'
210 created new head
210 created new head
211 $ hg merge
211 $ hg merge
212 merging b
212 merging b
213 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
213 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
214 (branch merge, don't forget to commit)
214 (branch merge, don't forget to commit)
215 $ hg ci -mmergeb -d '3 0'
215 $ hg ci -mmergeb -d '3 0'
216
216
217 annotate after merge
217 annotate after merge
218
218
219 $ hg annotate -nf b
219 $ hg annotate -nf b
220 0 a: a
220 0 a: a
221 1 a: a
221 1 a: a
222 1 a: a
222 1 a: a
223 3 b: b4
223 3 b: b4
224 4 b: c
224 4 b: c
225 3 b: b5
225 3 b: b5
226
226
227 annotate after merge with -l
227 annotate after merge with -l
228
228
229 $ hg annotate -nlf b
229 $ hg annotate -nlf b
230 0 a:1: a
230 0 a:1: a
231 1 a:2: a
231 1 a:2: a
232 1 a:3: a
232 1 a:3: a
233 3 b:4: b4
233 3 b:4: b4
234 4 b:5: c
234 4 b:5: c
235 3 b:5: b5
235 3 b:5: b5
236
236
237 $ hg up -C 1
237 $ hg up -C 1
238 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
238 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
239 $ hg cp a b
239 $ hg cp a b
240 $ cat <<EOF > b
240 $ cat <<EOF > b
241 > a
241 > a
242 > z
242 > z
243 > a
243 > a
244 > EOF
244 > EOF
245 $ hg ci -mc -d '3 0'
245 $ hg ci -mc -d '3 0'
246 created new head
246 created new head
247 $ hg merge
247 $ hg merge
248 merging b
248 merging b
249 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
249 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
250 (branch merge, don't forget to commit)
250 (branch merge, don't forget to commit)
251 $ cat <<EOF >> b
251 $ cat <<EOF >> b
252 > b4
252 > b4
253 > c
253 > c
254 > b5
254 > b5
255 > EOF
255 > EOF
256 $ echo d >> b
256 $ echo d >> b
257 $ hg ci -mmerge2 -d '4 0'
257 $ hg ci -mmerge2 -d '4 0'
258
258
259 annotate after rename merge
259 annotate after rename merge
260
260
261 $ hg annotate -nf b
261 $ hg annotate -nf b
262 0 a: a
262 0 a: a
263 6 b: z
263 6 b: z
264 1 a: a
264 1 a: a
265 3 b: b4
265 3 b: b4
266 4 b: c
266 4 b: c
267 3 b: b5
267 3 b: b5
268 7 b: d
268 7 b: d
269
269
270 annotate after rename merge with -l
270 annotate after rename merge with -l
271
271
272 $ hg annotate -nlf b
272 $ hg annotate -nlf b
273 0 a:1: a
273 0 a:1: a
274 6 b:2: z
274 6 b:2: z
275 1 a:3: a
275 1 a:3: a
276 3 b:4: b4
276 3 b:4: b4
277 4 b:5: c
277 4 b:5: c
278 3 b:5: b5
278 3 b:5: b5
279 7 b:7: d
279 7 b:7: d
280
280
281 --skip nothing (should be the same as no --skip at all)
281 --skip nothing (should be the same as no --skip at all)
282
282
283 $ hg annotate -nlf b --skip '1::0'
283 $ hg annotate -nlf b --skip '1::0'
284 0 a:1: a
284 0 a:1: a
285 6 b:2: z
285 6 b:2: z
286 1 a:3: a
286 1 a:3: a
287 3 b:4: b4
287 3 b:4: b4
288 4 b:5: c
288 4 b:5: c
289 3 b:5: b5
289 3 b:5: b5
290 7 b:7: d
290 7 b:7: d
291
291
292 --skip a modified line. Note a slight behavior difference in pure - this is
292 --skip a modified line. Note a slight behavior difference in pure - this is
293 because the pure code comes up with slightly different deltas internally.
293 because the pure code comes up with slightly different deltas internally.
294
294
295 $ hg annotate -nlf b --skip 6
295 $ hg annotate -nlf b --skip 6
296 0 a:1: a
296 0 a:1: a
297 1 a:2* z (no-pure !)
297 1 a:2* z (no-pure !)
298 0 a:1* z (pure !)
298 0 a:1* z (pure !)
299 1 a:3: a
299 1 a:3: a
300 3 b:4: b4
300 3 b:4: b4
301 4 b:5: c
301 4 b:5: c
302 3 b:5: b5
302 3 b:5: b5
303 7 b:7: d
303 7 b:7: d
304
304
305 --skip added lines (and test multiple skip)
305 --skip added lines (and test multiple skip)
306
306
307 $ hg annotate -nlf b --skip 3
307 $ hg annotate -nlf b --skip 3
308 0 a:1: a
308 0 a:1: a
309 6 b:2: z
309 6 b:2: z
310 1 a:3: a
310 1 a:3: a
311 1 a:3* b4
311 1 a:3* b4
312 4 b:5: c
312 4 b:5: c
313 1 a:3* b5
313 1 a:3* b5
314 7 b:7: d
314 7 b:7: d
315
315
316 $ hg annotate -nlf b --skip 4
316 $ hg annotate -nlf b --skip 4
317 0 a:1: a
317 0 a:1: a
318 6 b:2: z
318 6 b:2: z
319 1 a:3: a
319 1 a:3: a
320 3 b:4: b4
320 3 b:4: b4
321 1 a:3* c
321 1 a:3* c
322 3 b:5: b5
322 3 b:5: b5
323 7 b:7: d
323 7 b:7: d
324
324
325 $ hg annotate -nlf b --skip 3 --skip 4
325 $ hg annotate -nlf b --skip 3 --skip 4
326 0 a:1: a
326 0 a:1: a
327 6 b:2: z
327 6 b:2: z
328 1 a:3: a
328 1 a:3: a
329 1 a:3* b4
329 1 a:3* b4
330 1 a:3* c
330 1 a:3* c
331 1 a:3* b5
331 1 a:3* b5
332 7 b:7: d
332 7 b:7: d
333
333
334 $ hg annotate -nlf b --skip 'merge()'
334 $ hg annotate -nlf b --skip 'merge()'
335 0 a:1: a
335 0 a:1: a
336 6 b:2: z
336 6 b:2: z
337 1 a:3: a
337 1 a:3: a
338 3 b:4: b4
338 3 b:4: b4
339 4 b:5: c
339 4 b:5: c
340 3 b:5: b5
340 3 b:5: b5
341 3 b:5* d
341 3 b:5* d
342
342
343 --skip everything -- use the revision the file was introduced in
343 --skip everything -- use the revision the file was introduced in
344
344
345 $ hg annotate -nlf b --skip 'all()'
345 $ hg annotate -nlf b --skip 'all()'
346 0 a:1: a
346 0 a:1: a
347 0 a:1* z
347 0 a:1* z
348 0 a:1* a
348 0 a:1* a
349 0 a:1* b4
349 0 a:1* b4
350 0 a:1* c
350 0 a:1* c
351 0 a:1* b5
351 0 a:1* b5
352 0 a:1* d
352 0 a:1* d
353
353
354 Issue2807: alignment of line numbers with -l
354 Issue2807: alignment of line numbers with -l
355
355
356 $ echo more >> b
356 $ echo more >> b
357 $ hg ci -mmore -d '5 0'
357 $ hg ci -mmore -d '5 0'
358 $ echo more >> b
358 $ echo more >> b
359 $ hg ci -mmore -d '6 0'
359 $ hg ci -mmore -d '6 0'
360 $ echo more >> b
360 $ echo more >> b
361 $ hg ci -mmore -d '7 0'
361 $ hg ci -mmore -d '7 0'
362 $ hg annotate -nlf b
362 $ hg annotate -nlf b
363 0 a: 1: a
363 0 a: 1: a
364 6 b: 2: z
364 6 b: 2: z
365 1 a: 3: a
365 1 a: 3: a
366 3 b: 4: b4
366 3 b: 4: b4
367 4 b: 5: c
367 4 b: 5: c
368 3 b: 5: b5
368 3 b: 5: b5
369 7 b: 7: d
369 7 b: 7: d
370 8 b: 8: more
370 8 b: 8: more
371 9 b: 9: more
371 9 b: 9: more
372 10 b:10: more
372 10 b:10: more
373
373
374 linkrev vs rev
374 linkrev vs rev
375
375
376 $ hg annotate -r tip -n a
376 $ hg annotate -r tip -n a
377 0: a
377 0: a
378 1: a
378 1: a
379 1: a
379 1: a
380
380
381 linkrev vs rev with -l
381 linkrev vs rev with -l
382
382
383 $ hg annotate -r tip -nl a
383 $ hg annotate -r tip -nl a
384 0:1: a
384 0:1: a
385 1:2: a
385 1:2: a
386 1:3: a
386 1:3: a
387
387
388 Issue589: "undelete" sequence leads to crash
388 Issue589: "undelete" sequence leads to crash
389
389
390 annotate was crashing when trying to --follow something
390 annotate was crashing when trying to --follow something
391
391
392 like A -> B -> A
392 like A -> B -> A
393
393
394 generate ABA rename configuration
394 generate ABA rename configuration
395
395
396 $ echo foo > foo
396 $ echo foo > foo
397 $ hg add foo
397 $ hg add foo
398 $ hg ci -m addfoo
398 $ hg ci -m addfoo
399 $ hg rename foo bar
399 $ hg rename foo bar
400 $ hg ci -m renamefoo
400 $ hg ci -m renamefoo
401 $ hg rename bar foo
401 $ hg rename bar foo
402 $ hg ci -m renamebar
402 $ hg ci -m renamebar
403
403
404 annotate after ABA with follow
404 annotate after ABA with follow
405
405
406 $ hg annotate --follow foo
406 $ hg annotate --follow foo
407 foo: foo
407 foo: foo
408
408
409 missing file
409 missing file
410
410
411 $ hg ann nosuchfile
411 $ hg ann nosuchfile
412 abort: nosuchfile: no such file in rev e9e6b4fa872f
412 abort: nosuchfile: no such file in rev e9e6b4fa872f
413 [255]
413 [255]
414
414
415 annotate file without '\n' on last line
415 annotate file without '\n' on last line
416
416
417 $ printf "" > c
417 $ printf "" > c
418 $ hg ci -A -m test -u nobody -d '1 0'
418 $ hg ci -A -m test -u nobody -d '1 0'
419 adding c
419 adding c
420 $ hg annotate c
420 $ hg annotate c
421 $ printf "a\nb" > c
421 $ printf "a\nb" > c
422 $ hg ci -m test
422 $ hg ci -m test
423 $ hg annotate c
423 $ hg annotate c
424 [0-9]+: a (re)
424 [0-9]+: a (re)
425 [0-9]+: b (re)
425 [0-9]+: b (re)
426
426
427 Issue3841: check annotation of the file of which filelog includes
427 Issue3841: check annotation of the file of which filelog includes
428 merging between the revision and its ancestor
428 merging between the revision and its ancestor
429
429
430 to reproduce the situation with recent Mercurial, this script uses (1)
430 to reproduce the situation with recent Mercurial, this script uses (1)
431 "hg debugsetparents" to merge without ancestor check by "hg merge",
431 "hg debugsetparents" to merge without ancestor check by "hg merge",
432 and (2) the extension to allow filelog merging between the revision
432 and (2) the extension to allow filelog merging between the revision
433 and its ancestor by overriding "repo._filecommit".
433 and its ancestor by overriding "repo._filecommit".
434
434
435 $ cat > ../legacyrepo.py <<EOF
435 $ cat > ../legacyrepo.py <<EOF
436 > from __future__ import absolute_import
436 > from __future__ import absolute_import
437 > from mercurial import error, node
437 > from mercurial import error, node
438 > def reposetup(ui, repo):
438 > def reposetup(ui, repo):
439 > class legacyrepo(repo.__class__):
439 > class legacyrepo(repo.__class__):
440 > def _filecommit(self, fctx, manifest1, manifest2,
440 > def _filecommit(self, fctx, manifest1, manifest2,
441 > linkrev, tr, changelist):
441 > linkrev, tr, changelist, includecopymeta):
442 > fname = fctx.path()
442 > fname = fctx.path()
443 > text = fctx.data()
443 > text = fctx.data()
444 > flog = self.file(fname)
444 > flog = self.file(fname)
445 > fparent1 = manifest1.get(fname, node.nullid)
445 > fparent1 = manifest1.get(fname, node.nullid)
446 > fparent2 = manifest2.get(fname, node.nullid)
446 > fparent2 = manifest2.get(fname, node.nullid)
447 > meta = {}
447 > meta = {}
448 > copy = fctx.copysource()
448 > copy = fctx.copysource()
449 > if copy and copy != fname:
449 > if copy and copy != fname:
450 > raise error.Abort('copying is not supported')
450 > raise error.Abort('copying is not supported')
451 > if fparent2 != node.nullid:
451 > if fparent2 != node.nullid:
452 > changelist.append(fname)
452 > changelist.append(fname)
453 > return flog.add(text, meta, tr, linkrev,
453 > return flog.add(text, meta, tr, linkrev,
454 > fparent1, fparent2)
454 > fparent1, fparent2)
455 > raise error.Abort('only merging is supported')
455 > raise error.Abort('only merging is supported')
456 > repo.__class__ = legacyrepo
456 > repo.__class__ = legacyrepo
457 > EOF
457 > EOF
458
458
459 $ cat > baz <<EOF
459 $ cat > baz <<EOF
460 > 1
460 > 1
461 > 2
461 > 2
462 > 3
462 > 3
463 > 4
463 > 4
464 > 5
464 > 5
465 > EOF
465 > EOF
466 $ hg add baz
466 $ hg add baz
467 $ hg commit -m "baz:0"
467 $ hg commit -m "baz:0"
468
468
469 $ cat > baz <<EOF
469 $ cat > baz <<EOF
470 > 1 baz:1
470 > 1 baz:1
471 > 2
471 > 2
472 > 3
472 > 3
473 > 4
473 > 4
474 > 5
474 > 5
475 > EOF
475 > EOF
476 $ hg commit -m "baz:1"
476 $ hg commit -m "baz:1"
477
477
478 $ cat > baz <<EOF
478 $ cat > baz <<EOF
479 > 1 baz:1
479 > 1 baz:1
480 > 2 baz:2
480 > 2 baz:2
481 > 3
481 > 3
482 > 4
482 > 4
483 > 5
483 > 5
484 > EOF
484 > EOF
485 $ hg debugsetparents 17 17
485 $ hg debugsetparents 17 17
486 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:2"
486 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:2"
487 $ hg debugindexdot baz
487 $ hg debugindexdot baz
488 digraph G {
488 digraph G {
489 -1 -> 0
489 -1 -> 0
490 0 -> 1
490 0 -> 1
491 1 -> 2
491 1 -> 2
492 1 -> 2
492 1 -> 2
493 }
493 }
494 $ hg annotate baz
494 $ hg annotate baz
495 17: 1 baz:1
495 17: 1 baz:1
496 18: 2 baz:2
496 18: 2 baz:2
497 16: 3
497 16: 3
498 16: 4
498 16: 4
499 16: 5
499 16: 5
500
500
501 $ cat > baz <<EOF
501 $ cat > baz <<EOF
502 > 1 baz:1
502 > 1 baz:1
503 > 2 baz:2
503 > 2 baz:2
504 > 3 baz:3
504 > 3 baz:3
505 > 4
505 > 4
506 > 5
506 > 5
507 > EOF
507 > EOF
508 $ hg commit -m "baz:3"
508 $ hg commit -m "baz:3"
509
509
510 $ cat > baz <<EOF
510 $ cat > baz <<EOF
511 > 1 baz:1
511 > 1 baz:1
512 > 2 baz:2
512 > 2 baz:2
513 > 3 baz:3
513 > 3 baz:3
514 > 4 baz:4
514 > 4 baz:4
515 > 5
515 > 5
516 > EOF
516 > EOF
517 $ hg debugsetparents 19 18
517 $ hg debugsetparents 19 18
518 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:4"
518 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:4"
519 $ hg debugindexdot baz
519 $ hg debugindexdot baz
520 digraph G {
520 digraph G {
521 -1 -> 0
521 -1 -> 0
522 0 -> 1
522 0 -> 1
523 1 -> 2
523 1 -> 2
524 1 -> 2
524 1 -> 2
525 2 -> 3
525 2 -> 3
526 3 -> 4
526 3 -> 4
527 2 -> 4
527 2 -> 4
528 }
528 }
529 $ hg annotate baz
529 $ hg annotate baz
530 17: 1 baz:1
530 17: 1 baz:1
531 18: 2 baz:2
531 18: 2 baz:2
532 19: 3 baz:3
532 19: 3 baz:3
533 20: 4 baz:4
533 20: 4 baz:4
534 16: 5
534 16: 5
535
535
536 annotate clean file
536 annotate clean file
537
537
538 $ hg annotate -ncr "wdir()" foo
538 $ hg annotate -ncr "wdir()" foo
539 11 472b18db256d : foo
539 11 472b18db256d : foo
540
540
541 annotate modified file
541 annotate modified file
542
542
543 $ echo foofoo >> foo
543 $ echo foofoo >> foo
544 $ hg annotate -r "wdir()" foo
544 $ hg annotate -r "wdir()" foo
545 11 : foo
545 11 : foo
546 20+: foofoo
546 20+: foofoo
547
547
548 $ hg annotate -cr "wdir()" foo
548 $ hg annotate -cr "wdir()" foo
549 472b18db256d : foo
549 472b18db256d : foo
550 b6bedd5477e7+: foofoo
550 b6bedd5477e7+: foofoo
551
551
552 $ hg annotate -ncr "wdir()" foo
552 $ hg annotate -ncr "wdir()" foo
553 11 472b18db256d : foo
553 11 472b18db256d : foo
554 20 b6bedd5477e7+: foofoo
554 20 b6bedd5477e7+: foofoo
555
555
556 $ hg annotate --debug -ncr "wdir()" foo
556 $ hg annotate --debug -ncr "wdir()" foo
557 11 472b18db256d1e8282064eab4bfdaf48cbfe83cd : foo
557 11 472b18db256d1e8282064eab4bfdaf48cbfe83cd : foo
558 20 b6bedd5477e797f25e568a6402d4697f3f895a72+: foofoo
558 20 b6bedd5477e797f25e568a6402d4697f3f895a72+: foofoo
559
559
560 $ hg annotate -udr "wdir()" foo
560 $ hg annotate -udr "wdir()" foo
561 test Thu Jan 01 00:00:00 1970 +0000: foo
561 test Thu Jan 01 00:00:00 1970 +0000: foo
562 test [A-Za-z0-9:+ ]+: foofoo (re)
562 test [A-Za-z0-9:+ ]+: foofoo (re)
563
563
564 $ hg annotate -ncr "wdir()" -Tjson foo
564 $ hg annotate -ncr "wdir()" -Tjson foo
565 [
565 [
566 {
566 {
567 "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": "ffffffffffffffffffffffffffffffffffffffff", "rev": 2147483647}],
567 "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": "ffffffffffffffffffffffffffffffffffffffff", "rev": 2147483647}],
568 "path": "foo"
568 "path": "foo"
569 }
569 }
570 ]
570 ]
571
571
572 annotate added file
572 annotate added file
573
573
574 $ echo bar > bar
574 $ echo bar > bar
575 $ hg add bar
575 $ hg add bar
576 $ hg annotate -ncr "wdir()" bar
576 $ hg annotate -ncr "wdir()" bar
577 20 b6bedd5477e7+: bar
577 20 b6bedd5477e7+: bar
578
578
579 annotate renamed file
579 annotate renamed file
580
580
581 $ hg rename foo renamefoo2
581 $ hg rename foo renamefoo2
582 $ hg annotate -ncr "wdir()" renamefoo2
582 $ hg annotate -ncr "wdir()" renamefoo2
583 11 472b18db256d : foo
583 11 472b18db256d : foo
584 20 b6bedd5477e7+: foofoo
584 20 b6bedd5477e7+: foofoo
585
585
586 annotate missing file
586 annotate missing file
587
587
588 $ rm baz
588 $ rm baz
589
589
590 $ hg annotate -ncr "wdir()" baz
590 $ hg annotate -ncr "wdir()" baz
591 abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
591 abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
592 abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
592 abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
593 [255]
593 [255]
594
594
595 annotate removed file
595 annotate removed file
596
596
597 $ hg rm baz
597 $ hg rm baz
598
598
599 $ hg annotate -ncr "wdir()" baz
599 $ hg annotate -ncr "wdir()" baz
600 abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
600 abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
601 abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
601 abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
602 [255]
602 [255]
603
603
604 $ hg revert --all --no-backup --quiet
604 $ hg revert --all --no-backup --quiet
605 $ hg id -n
605 $ hg id -n
606 20
606 20
607
607
608 Test followlines() revset; we usually check both followlines(pat, range) and
608 Test followlines() revset; we usually check both followlines(pat, range) and
609 followlines(pat, range, descend=True) to make sure both give the same result
609 followlines(pat, range, descend=True) to make sure both give the same result
610 when they should.
610 when they should.
611
611
612 $ echo a >> foo
612 $ echo a >> foo
613 $ hg ci -m 'foo: add a'
613 $ hg ci -m 'foo: add a'
614 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5)'
614 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5)'
615 16: baz:0
615 16: baz:0
616 19: baz:3
616 19: baz:3
617 20: baz:4
617 20: baz:4
618 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=20)'
618 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=20)'
619 16: baz:0
619 16: baz:0
620 19: baz:3
620 19: baz:3
621 20: baz:4
621 20: baz:4
622 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19)'
622 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19)'
623 16: baz:0
623 16: baz:0
624 19: baz:3
624 19: baz:3
625 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19, descend=True)'
625 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19, descend=True)'
626 19: baz:3
626 19: baz:3
627 20: baz:4
627 20: baz:4
628 $ printf "0\n0\n" | cat - baz > baz1
628 $ printf "0\n0\n" | cat - baz > baz1
629 $ mv baz1 baz
629 $ mv baz1 baz
630 $ hg ci -m 'added two lines with 0'
630 $ hg ci -m 'added two lines with 0'
631 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
631 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
632 16: baz:0
632 16: baz:0
633 19: baz:3
633 19: baz:3
634 20: baz:4
634 20: baz:4
635 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, descend=true, startrev=19)'
635 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, descend=true, startrev=19)'
636 19: baz:3
636 19: baz:3
637 20: baz:4
637 20: baz:4
638 $ echo 6 >> baz
638 $ echo 6 >> baz
639 $ hg ci -m 'added line 8'
639 $ hg ci -m 'added line 8'
640 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
640 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
641 16: baz:0
641 16: baz:0
642 19: baz:3
642 19: baz:3
643 20: baz:4
643 20: baz:4
644 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19, descend=1)'
644 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19, descend=1)'
645 19: baz:3
645 19: baz:3
646 20: baz:4
646 20: baz:4
647 $ sed 's/3/3+/' baz > baz.new
647 $ sed 's/3/3+/' baz > baz.new
648 $ mv baz.new baz
648 $ mv baz.new baz
649 $ hg ci -m 'baz:3->3+'
649 $ hg ci -m 'baz:3->3+'
650 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, descend=0)'
650 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, descend=0)'
651 16: baz:0
651 16: baz:0
652 19: baz:3
652 19: baz:3
653 20: baz:4
653 20: baz:4
654 24: baz:3->3+
654 24: baz:3->3+
655 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=17, descend=True)'
655 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=17, descend=True)'
656 19: baz:3
656 19: baz:3
657 20: baz:4
657 20: baz:4
658 24: baz:3->3+
658 24: baz:3->3+
659 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 1:2, descend=false)'
659 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 1:2, descend=false)'
660 22: added two lines with 0
660 22: added two lines with 0
661
661
662 file patterns are okay
662 file patterns are okay
663 $ hg log -T '{rev}: {desc}\n' -r 'followlines("path:baz", 1:2)'
663 $ hg log -T '{rev}: {desc}\n' -r 'followlines("path:baz", 1:2)'
664 22: added two lines with 0
664 22: added two lines with 0
665
665
666 renames are followed
666 renames are followed
667 $ hg mv baz qux
667 $ hg mv baz qux
668 $ sed 's/4/4+/' qux > qux.new
668 $ sed 's/4/4+/' qux > qux.new
669 $ mv qux.new qux
669 $ mv qux.new qux
670 $ hg ci -m 'qux:4->4+'
670 $ hg ci -m 'qux:4->4+'
671 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
671 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
672 16: baz:0
672 16: baz:0
673 19: baz:3
673 19: baz:3
674 20: baz:4
674 20: baz:4
675 24: baz:3->3+
675 24: baz:3->3+
676 25: qux:4->4+
676 25: qux:4->4+
677
677
678 but are missed when following children
678 but are missed when following children
679 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, startrev=22, descend=True)'
679 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, startrev=22, descend=True)'
680 24: baz:3->3+
680 24: baz:3->3+
681
681
682 merge
682 merge
683 $ hg up 24 --quiet
683 $ hg up 24 --quiet
684 $ echo 7 >> baz
684 $ echo 7 >> baz
685 $ hg ci -m 'one more line, out of line range'
685 $ hg ci -m 'one more line, out of line range'
686 created new head
686 created new head
687 $ sed 's/3+/3-/' baz > baz.new
687 $ sed 's/3+/3-/' baz > baz.new
688 $ mv baz.new baz
688 $ mv baz.new baz
689 $ hg ci -m 'baz:3+->3-'
689 $ hg ci -m 'baz:3+->3-'
690 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
690 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
691 16: baz:0
691 16: baz:0
692 19: baz:3
692 19: baz:3
693 20: baz:4
693 20: baz:4
694 24: baz:3->3+
694 24: baz:3->3+
695 27: baz:3+->3-
695 27: baz:3+->3-
696 $ hg merge 25
696 $ hg merge 25
697 merging baz and qux to qux
697 merging baz and qux to qux
698 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
698 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
699 (branch merge, don't forget to commit)
699 (branch merge, don't forget to commit)
700 $ hg ci -m merge
700 $ hg ci -m merge
701 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
701 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
702 16: baz:0
702 16: baz:0
703 19: baz:3
703 19: baz:3
704 20: baz:4
704 20: baz:4
705 24: baz:3->3+
705 24: baz:3->3+
706 25: qux:4->4+
706 25: qux:4->4+
707 27: baz:3+->3-
707 27: baz:3+->3-
708 28: merge
708 28: merge
709 $ hg up 25 --quiet
709 $ hg up 25 --quiet
710 $ hg merge 27
710 $ hg merge 27
711 merging qux and baz to qux
711 merging qux and baz to qux
712 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
712 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
713 (branch merge, don't forget to commit)
713 (branch merge, don't forget to commit)
714 $ hg ci -m 'merge from other side'
714 $ hg ci -m 'merge from other side'
715 created new head
715 created new head
716 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
716 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
717 16: baz:0
717 16: baz:0
718 19: baz:3
718 19: baz:3
719 20: baz:4
719 20: baz:4
720 24: baz:3->3+
720 24: baz:3->3+
721 25: qux:4->4+
721 25: qux:4->4+
722 27: baz:3+->3-
722 27: baz:3+->3-
723 29: merge from other side
723 29: merge from other side
724 $ hg up 24 --quiet
724 $ hg up 24 --quiet
725
725
726 we are missing the branch with rename when following children
726 we are missing the branch with rename when following children
727 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, startrev=26, descend=True)'
727 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, startrev=26, descend=True)'
728 27: baz:3+->3-
728 27: baz:3+->3-
729
729
730 we follow all branches in descending direction
730 we follow all branches in descending direction
731 $ hg up 23 --quiet
731 $ hg up 23 --quiet
732 $ sed 's/3/+3/' baz > baz.new
732 $ sed 's/3/+3/' baz > baz.new
733 $ mv baz.new baz
733 $ mv baz.new baz
734 $ hg ci -m 'baz:3->+3'
734 $ hg ci -m 'baz:3->+3'
735 created new head
735 created new head
736 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 2:5, startrev=16, descend=True)' --graph
736 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 2:5, startrev=16, descend=True)' --graph
737 @ 30: baz:3->+3
737 @ 30: baz:3->+3
738 :
738 :
739 : o 27: baz:3+->3-
739 : o 27: baz:3+->3-
740 : :
740 : :
741 : o 24: baz:3->3+
741 : o 24: baz:3->3+
742 :/
742 :/
743 o 20: baz:4
743 o 20: baz:4
744 |\
744 |\
745 | o 19: baz:3
745 | o 19: baz:3
746 |/
746 |/
747 o 18: baz:2
747 o 18: baz:2
748 :
748 :
749 o 16: baz:0
749 o 16: baz:0
750 |
750 |
751 ~
751 ~
752
752
753 Issue5595: on a merge changeset with different line ranges depending on
753 Issue5595: on a merge changeset with different line ranges depending on
754 parent, be conservative and use the surrounding interval to avoid loosing
754 parent, be conservative and use the surrounding interval to avoid loosing
755 track of possible further descendants in specified range.
755 track of possible further descendants in specified range.
756
756
757 $ hg up 23 --quiet
757 $ hg up 23 --quiet
758 $ hg cat baz -r 24
758 $ hg cat baz -r 24
759 0
759 0
760 0
760 0
761 1 baz:1
761 1 baz:1
762 2 baz:2
762 2 baz:2
763 3+ baz:3
763 3+ baz:3
764 4 baz:4
764 4 baz:4
765 5
765 5
766 6
766 6
767 $ cat > baz << EOF
767 $ cat > baz << EOF
768 > 0
768 > 0
769 > 0
769 > 0
770 > a
770 > a
771 > b
771 > b
772 > 3+ baz:3
772 > 3+ baz:3
773 > 4 baz:4
773 > 4 baz:4
774 > y
774 > y
775 > z
775 > z
776 > EOF
776 > EOF
777 $ hg ci -m 'baz: mostly rewrite with some content from 24'
777 $ hg ci -m 'baz: mostly rewrite with some content from 24'
778 created new head
778 created new head
779 $ hg merge --tool :merge-other 24
779 $ hg merge --tool :merge-other 24
780 merging baz
780 merging baz
781 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
781 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
782 (branch merge, don't forget to commit)
782 (branch merge, don't forget to commit)
783 $ hg ci -m 'merge forgetting about baz rewrite'
783 $ hg ci -m 'merge forgetting about baz rewrite'
784 $ cat > baz << EOF
784 $ cat > baz << EOF
785 > 0
785 > 0
786 > 0
786 > 0
787 > 1 baz:1
787 > 1 baz:1
788 > 2+ baz:2
788 > 2+ baz:2
789 > 3+ baz:3
789 > 3+ baz:3
790 > 4 baz:4
790 > 4 baz:4
791 > 5
791 > 5
792 > 6
792 > 6
793 > EOF
793 > EOF
794 $ hg ci -m 'baz: narrow change (2->2+)'
794 $ hg ci -m 'baz: narrow change (2->2+)'
795 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:4, startrev=20, descend=True)' --graph
795 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:4, startrev=20, descend=True)' --graph
796 @ 33: baz: narrow change (2->2+)
796 @ 33: baz: narrow change (2->2+)
797 |
797 |
798 o 32: merge forgetting about baz rewrite
798 o 32: merge forgetting about baz rewrite
799 |\
799 |\
800 | o 31: baz: mostly rewrite with some content from 24
800 | o 31: baz: mostly rewrite with some content from 24
801 | :
801 | :
802 | : o 30: baz:3->+3
802 | : o 30: baz:3->+3
803 | :/
803 | :/
804 +---o 27: baz:3+->3-
804 +---o 27: baz:3+->3-
805 | :
805 | :
806 o : 24: baz:3->3+
806 o : 24: baz:3->3+
807 :/
807 :/
808 o 20: baz:4
808 o 20: baz:4
809 |\
809 |\
810 ~ ~
810 ~ ~
811
811
812 An integer as a line range, which is parsed as '1:1'
812 An integer as a line range, which is parsed as '1:1'
813
813
814 $ hg log -r 'followlines(baz, 1)'
814 $ hg log -r 'followlines(baz, 1)'
815 changeset: 22:2174d0bf352a
815 changeset: 22:2174d0bf352a
816 user: test
816 user: test
817 date: Thu Jan 01 00:00:00 1970 +0000
817 date: Thu Jan 01 00:00:00 1970 +0000
818 summary: added two lines with 0
818 summary: added two lines with 0
819
819
820
820
821 check error cases
821 check error cases
822 $ hg up 24 --quiet
822 $ hg up 24 --quiet
823 $ hg log -r 'followlines()'
823 $ hg log -r 'followlines()'
824 hg: parse error: followlines takes at least 1 positional arguments
824 hg: parse error: followlines takes at least 1 positional arguments
825 [255]
825 [255]
826 $ hg log -r 'followlines(baz)'
826 $ hg log -r 'followlines(baz)'
827 hg: parse error: followlines requires a line range
827 hg: parse error: followlines requires a line range
828 [255]
828 [255]
829 $ hg log -r 'followlines(baz, x)'
829 $ hg log -r 'followlines(baz, x)'
830 hg: parse error: followlines expects a line number or a range
830 hg: parse error: followlines expects a line number or a range
831 [255]
831 [255]
832 $ hg log -r 'followlines(baz, 1:2, startrev=desc("b"))'
832 $ hg log -r 'followlines(baz, 1:2, startrev=desc("b"))'
833 hg: parse error: followlines expects exactly one revision
833 hg: parse error: followlines expects exactly one revision
834 [255]
834 [255]
835 $ hg log -r 'followlines("glob:*", 1:2)'
835 $ hg log -r 'followlines("glob:*", 1:2)'
836 hg: parse error: followlines expects exactly one file
836 hg: parse error: followlines expects exactly one file
837 [255]
837 [255]
838 $ hg log -r 'followlines(baz, 1:)'
838 $ hg log -r 'followlines(baz, 1:)'
839 hg: parse error: line range bounds must be integers
839 hg: parse error: line range bounds must be integers
840 [255]
840 [255]
841 $ hg log -r 'followlines(baz, :1)'
841 $ hg log -r 'followlines(baz, :1)'
842 hg: parse error: line range bounds must be integers
842 hg: parse error: line range bounds must be integers
843 [255]
843 [255]
844 $ hg log -r 'followlines(baz, x:4)'
844 $ hg log -r 'followlines(baz, x:4)'
845 hg: parse error: line range bounds must be integers
845 hg: parse error: line range bounds must be integers
846 [255]
846 [255]
847 $ hg log -r 'followlines(baz, 5:4)'
847 $ hg log -r 'followlines(baz, 5:4)'
848 hg: parse error: line range must be positive
848 hg: parse error: line range must be positive
849 [255]
849 [255]
850 $ hg log -r 'followlines(baz, 0:4)'
850 $ hg log -r 'followlines(baz, 0:4)'
851 hg: parse error: fromline must be strictly positive
851 hg: parse error: fromline must be strictly positive
852 [255]
852 [255]
853 $ hg log -r 'followlines(baz, 2:40)'
853 $ hg log -r 'followlines(baz, 2:40)'
854 abort: line range exceeds file size
854 abort: line range exceeds file size
855 [255]
855 [255]
856 $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=[1])'
856 $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=[1])'
857 hg: parse error at 43: not a prefix: [
857 hg: parse error at 43: not a prefix: [
858 (followlines(baz, 2:4, startrev=20, descend=[1])
858 (followlines(baz, 2:4, startrev=20, descend=[1])
859 ^ here)
859 ^ here)
860 [255]
860 [255]
861 $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=a)'
861 $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=a)'
862 hg: parse error: descend argument must be a boolean
862 hg: parse error: descend argument must be a boolean
863 [255]
863 [255]
864
864
865 Test empty annotate output
865 Test empty annotate output
866
866
867 $ printf '\0' > binary
867 $ printf '\0' > binary
868 $ touch empty
868 $ touch empty
869 $ hg ci -qAm 'add binary and empty files'
869 $ hg ci -qAm 'add binary and empty files'
870
870
871 $ hg annotate binary empty
871 $ hg annotate binary empty
872 binary: binary file
872 binary: binary file
873
873
874 $ hg annotate -Tjson binary empty
874 $ hg annotate -Tjson binary empty
875 [
875 [
876 {
876 {
877 "path": "binary"
877 "path": "binary"
878 },
878 },
879 {
879 {
880 "lines": [],
880 "lines": [],
881 "path": "empty"
881 "path": "empty"
882 }
882 }
883 ]
883 ]
884
884
885 Test annotate with whitespace options
885 Test annotate with whitespace options
886
886
887 $ cd ..
887 $ cd ..
888 $ hg init repo-ws
888 $ hg init repo-ws
889 $ cd repo-ws
889 $ cd repo-ws
890 $ cat > a <<EOF
890 $ cat > a <<EOF
891 > aa
891 > aa
892 >
892 >
893 > b b
893 > b b
894 > EOF
894 > EOF
895 $ hg ci -Am "adda"
895 $ hg ci -Am "adda"
896 adding a
896 adding a
897 $ sed 's/EOL$//g' > a <<EOF
897 $ sed 's/EOL$//g' > a <<EOF
898 > a a
898 > a a
899 >
899 >
900 > EOL
900 > EOL
901 > b b
901 > b b
902 > EOF
902 > EOF
903 $ hg ci -m "changea"
903 $ hg ci -m "changea"
904
904
905 Annotate with no option
905 Annotate with no option
906
906
907 $ hg annotate a
907 $ hg annotate a
908 1: a a
908 1: a a
909 0:
909 0:
910 1:
910 1:
911 1: b b
911 1: b b
912
912
913 Annotate with --ignore-space-change
913 Annotate with --ignore-space-change
914
914
915 $ hg annotate --ignore-space-change a
915 $ hg annotate --ignore-space-change a
916 1: a a
916 1: a a
917 1:
917 1:
918 0:
918 0:
919 0: b b
919 0: b b
920
920
921 Annotate with --ignore-all-space
921 Annotate with --ignore-all-space
922
922
923 $ hg annotate --ignore-all-space a
923 $ hg annotate --ignore-all-space a
924 0: a a
924 0: a a
925 0:
925 0:
926 1:
926 1:
927 0: b b
927 0: b b
928
928
929 Annotate with --ignore-blank-lines (similar to no options case)
929 Annotate with --ignore-blank-lines (similar to no options case)
930
930
931 $ hg annotate --ignore-blank-lines a
931 $ hg annotate --ignore-blank-lines a
932 1: a a
932 1: a a
933 0:
933 0:
934 1:
934 1:
935 1: b b
935 1: b b
936
936
937 $ cd ..
937 $ cd ..
938
938
939 Annotate with orphaned CR (issue5798)
939 Annotate with orphaned CR (issue5798)
940 -------------------------------------
940 -------------------------------------
941
941
942 $ hg init repo-cr
942 $ hg init repo-cr
943 $ cd repo-cr
943 $ cd repo-cr
944
944
945 $ cat <<'EOF' >> "$TESTTMP/substcr.py"
945 $ cat <<'EOF' >> "$TESTTMP/substcr.py"
946 > import sys
946 > import sys
947 > from mercurial.utils import procutil
947 > from mercurial.utils import procutil
948 > procutil.setbinary(sys.stdin)
948 > procutil.setbinary(sys.stdin)
949 > procutil.setbinary(sys.stdout)
949 > procutil.setbinary(sys.stdout)
950 > stdin = getattr(sys.stdin, 'buffer', sys.stdin)
950 > stdin = getattr(sys.stdin, 'buffer', sys.stdin)
951 > stdout = getattr(sys.stdout, 'buffer', sys.stdout)
951 > stdout = getattr(sys.stdout, 'buffer', sys.stdout)
952 > stdout.write(stdin.read().replace(b'\r', b'[CR]'))
952 > stdout.write(stdin.read().replace(b'\r', b'[CR]'))
953 > EOF
953 > EOF
954
954
955 >>> with open('a', 'wb') as f:
955 >>> with open('a', 'wb') as f:
956 ... f.write(b'0a\r0b\r\n0c\r0d\r\n0e\n0f\n0g') and None
956 ... f.write(b'0a\r0b\r\n0c\r0d\r\n0e\n0f\n0g') and None
957 $ hg ci -qAm0
957 $ hg ci -qAm0
958 >>> with open('a', 'wb') as f:
958 >>> with open('a', 'wb') as f:
959 ... f.write(b'0a\r0b\r\n1c\r1d\r\n0e\n1f\n0g') and None
959 ... f.write(b'0a\r0b\r\n1c\r1d\r\n0e\n1f\n0g') and None
960 $ hg ci -m1
960 $ hg ci -m1
961
961
962 $ hg annotate -r0 a | "$PYTHON" "$TESTTMP/substcr.py"
962 $ hg annotate -r0 a | "$PYTHON" "$TESTTMP/substcr.py"
963 0: 0a[CR]0b[CR]
963 0: 0a[CR]0b[CR]
964 0: 0c[CR]0d[CR]
964 0: 0c[CR]0d[CR]
965 0: 0e
965 0: 0e
966 0: 0f
966 0: 0f
967 0: 0g
967 0: 0g
968 $ hg annotate -r1 a | "$PYTHON" "$TESTTMP/substcr.py"
968 $ hg annotate -r1 a | "$PYTHON" "$TESTTMP/substcr.py"
969 0: 0a[CR]0b[CR]
969 0: 0a[CR]0b[CR]
970 1: 1c[CR]1d[CR]
970 1: 1c[CR]1d[CR]
971 0: 0e
971 0: 0e
972 1: 1f
972 1: 1f
973 0: 0g
973 0: 0g
974
974
975 $ cd ..
975 $ cd ..
976
976
977 Annotate with linkrev pointing to another branch
977 Annotate with linkrev pointing to another branch
978 ------------------------------------------------
978 ------------------------------------------------
979
979
980 create history with a filerev whose linkrev points to another branch
980 create history with a filerev whose linkrev points to another branch
981
981
982 $ hg init branchedlinkrev
982 $ hg init branchedlinkrev
983 $ cd branchedlinkrev
983 $ cd branchedlinkrev
984 $ echo A > a
984 $ echo A > a
985 $ hg commit -Am 'contentA'
985 $ hg commit -Am 'contentA'
986 adding a
986 adding a
987 $ echo B >> a
987 $ echo B >> a
988 $ hg commit -m 'contentB'
988 $ hg commit -m 'contentB'
989 $ hg up --rev 'desc(contentA)'
989 $ hg up --rev 'desc(contentA)'
990 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
990 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
991 $ echo unrelated > unrelated
991 $ echo unrelated > unrelated
992 $ hg commit -Am 'unrelated'
992 $ hg commit -Am 'unrelated'
993 adding unrelated
993 adding unrelated
994 created new head
994 created new head
995 $ hg graft -r 'desc(contentB)'
995 $ hg graft -r 'desc(contentB)'
996 grafting 1:fd27c222e3e6 "contentB"
996 grafting 1:fd27c222e3e6 "contentB"
997 $ echo C >> a
997 $ echo C >> a
998 $ hg commit -m 'contentC'
998 $ hg commit -m 'contentC'
999 $ echo W >> a
999 $ echo W >> a
1000 $ hg log -G
1000 $ hg log -G
1001 @ changeset: 4:072f1e8df249
1001 @ changeset: 4:072f1e8df249
1002 | tag: tip
1002 | tag: tip
1003 | user: test
1003 | user: test
1004 | date: Thu Jan 01 00:00:00 1970 +0000
1004 | date: Thu Jan 01 00:00:00 1970 +0000
1005 | summary: contentC
1005 | summary: contentC
1006 |
1006 |
1007 o changeset: 3:ff38df03cc4b
1007 o changeset: 3:ff38df03cc4b
1008 | user: test
1008 | user: test
1009 | date: Thu Jan 01 00:00:00 1970 +0000
1009 | date: Thu Jan 01 00:00:00 1970 +0000
1010 | summary: contentB
1010 | summary: contentB
1011 |
1011 |
1012 o changeset: 2:62aaf3f6fc06
1012 o changeset: 2:62aaf3f6fc06
1013 | parent: 0:f0932f74827e
1013 | parent: 0:f0932f74827e
1014 | user: test
1014 | user: test
1015 | date: Thu Jan 01 00:00:00 1970 +0000
1015 | date: Thu Jan 01 00:00:00 1970 +0000
1016 | summary: unrelated
1016 | summary: unrelated
1017 |
1017 |
1018 | o changeset: 1:fd27c222e3e6
1018 | o changeset: 1:fd27c222e3e6
1019 |/ user: test
1019 |/ user: test
1020 | date: Thu Jan 01 00:00:00 1970 +0000
1020 | date: Thu Jan 01 00:00:00 1970 +0000
1021 | summary: contentB
1021 | summary: contentB
1022 |
1022 |
1023 o changeset: 0:f0932f74827e
1023 o changeset: 0:f0932f74827e
1024 user: test
1024 user: test
1025 date: Thu Jan 01 00:00:00 1970 +0000
1025 date: Thu Jan 01 00:00:00 1970 +0000
1026 summary: contentA
1026 summary: contentA
1027
1027
1028
1028
1029 Annotate should list ancestor of starting revision only
1029 Annotate should list ancestor of starting revision only
1030
1030
1031 $ hg annotate a
1031 $ hg annotate a
1032 0: A
1032 0: A
1033 3: B
1033 3: B
1034 4: C
1034 4: C
1035
1035
1036 $ hg annotate a -r 'wdir()'
1036 $ hg annotate a -r 'wdir()'
1037 0 : A
1037 0 : A
1038 3 : B
1038 3 : B
1039 4 : C
1039 4 : C
1040 4+: W
1040 4+: W
1041
1041
1042 Even when the starting revision is the linkrev-shadowed one:
1042 Even when the starting revision is the linkrev-shadowed one:
1043
1043
1044 $ hg annotate a -r 3
1044 $ hg annotate a -r 3
1045 0: A
1045 0: A
1046 3: B
1046 3: B
1047
1047
1048 $ cd ..
1048 $ cd ..
1049
1049
1050 Issue5360: Deleted chunk in p1 of a merge changeset
1050 Issue5360: Deleted chunk in p1 of a merge changeset
1051
1051
1052 $ hg init repo-5360
1052 $ hg init repo-5360
1053 $ cd repo-5360
1053 $ cd repo-5360
1054 $ echo 1 > a
1054 $ echo 1 > a
1055 $ hg commit -A a -m 1
1055 $ hg commit -A a -m 1
1056 $ echo 2 >> a
1056 $ echo 2 >> a
1057 $ hg commit -m 2
1057 $ hg commit -m 2
1058 $ echo a > a
1058 $ echo a > a
1059 $ hg commit -m a
1059 $ hg commit -m a
1060 $ hg update '.^' -q
1060 $ hg update '.^' -q
1061 $ echo 3 >> a
1061 $ echo 3 >> a
1062 $ hg commit -m 3 -q
1062 $ hg commit -m 3 -q
1063 $ hg merge 2 -q
1063 $ hg merge 2 -q
1064 $ cat > a << EOF
1064 $ cat > a << EOF
1065 > b
1065 > b
1066 > 1
1066 > 1
1067 > 2
1067 > 2
1068 > 3
1068 > 3
1069 > a
1069 > a
1070 > EOF
1070 > EOF
1071 $ hg resolve --mark -q
1071 $ hg resolve --mark -q
1072 $ hg commit -m m
1072 $ hg commit -m m
1073 $ hg annotate a
1073 $ hg annotate a
1074 4: b
1074 4: b
1075 0: 1
1075 0: 1
1076 1: 2
1076 1: 2
1077 3: 3
1077 3: 3
1078 2: a
1078 2: a
1079
1079
1080 $ cd ..
1080 $ cd ..
@@ -1,764 +1,764
1 (this file is backported from core hg tests/test-annotate.t)
1 (this file is backported from core hg tests/test-annotate.t)
2
2
3 $ cat >> $HGRCPATH << EOF
3 $ cat >> $HGRCPATH << EOF
4 > [diff]
4 > [diff]
5 > git=1
5 > git=1
6 > [extensions]
6 > [extensions]
7 > fastannotate=
7 > fastannotate=
8 > [fastannotate]
8 > [fastannotate]
9 > modes=fctx
9 > modes=fctx
10 > forcefollow=False
10 > forcefollow=False
11 > mainbranch=.
11 > mainbranch=.
12 > EOF
12 > EOF
13
13
14 $ HGMERGE=true; export HGMERGE
14 $ HGMERGE=true; export HGMERGE
15
15
16 init
16 init
17
17
18 $ hg init repo
18 $ hg init repo
19 $ cd repo
19 $ cd repo
20
20
21 commit
21 commit
22
22
23 $ echo 'a' > a
23 $ echo 'a' > a
24 $ hg ci -A -m test -u nobody -d '1 0'
24 $ hg ci -A -m test -u nobody -d '1 0'
25 adding a
25 adding a
26
26
27 annotate -c
27 annotate -c
28
28
29 $ hg annotate -c a
29 $ hg annotate -c a
30 8435f90966e4: a
30 8435f90966e4: a
31
31
32 annotate -cl
32 annotate -cl
33
33
34 $ hg annotate -cl a
34 $ hg annotate -cl a
35 8435f90966e4:1: a
35 8435f90966e4:1: a
36
36
37 annotate -d
37 annotate -d
38
38
39 $ hg annotate -d a
39 $ hg annotate -d a
40 Thu Jan 01 00:00:01 1970 +0000: a
40 Thu Jan 01 00:00:01 1970 +0000: a
41
41
42 annotate -n
42 annotate -n
43
43
44 $ hg annotate -n a
44 $ hg annotate -n a
45 0: a
45 0: a
46
46
47 annotate -nl
47 annotate -nl
48
48
49 $ hg annotate -nl a
49 $ hg annotate -nl a
50 0:1: a
50 0:1: a
51
51
52 annotate -u
52 annotate -u
53
53
54 $ hg annotate -u a
54 $ hg annotate -u a
55 nobody: a
55 nobody: a
56
56
57 annotate -cdnu
57 annotate -cdnu
58
58
59 $ hg annotate -cdnu a
59 $ hg annotate -cdnu a
60 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a
60 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a
61
61
62 annotate -cdnul
62 annotate -cdnul
63
63
64 $ hg annotate -cdnul a
64 $ hg annotate -cdnul a
65 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a
65 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a
66
66
67 annotate (JSON)
67 annotate (JSON)
68
68
69 $ hg annotate -Tjson a
69 $ hg annotate -Tjson a
70 [
70 [
71 {
71 {
72 "lines": [{"line": "a\n", "rev": 0}],
72 "lines": [{"line": "a\n", "rev": 0}],
73 "path": "a"
73 "path": "a"
74 }
74 }
75 ]
75 ]
76
76
77 $ hg annotate -Tjson -cdfnul a
77 $ hg annotate -Tjson -cdfnul a
78 [
78 [
79 {
79 {
80 "lines": [{"date": [1.0, 0], "line": "a\n", "lineno": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "path": "a", "rev": 0, "user": "nobody"}],
80 "lines": [{"date": [1.0, 0], "line": "a\n", "lineno": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "path": "a", "rev": 0, "user": "nobody"}],
81 "path": "a"
81 "path": "a"
82 }
82 }
83 ]
83 ]
84
84
85 $ cat <<EOF >>a
85 $ cat <<EOF >>a
86 > a
86 > a
87 > a
87 > a
88 > EOF
88 > EOF
89 $ hg ci -ma1 -d '1 0'
89 $ hg ci -ma1 -d '1 0'
90 $ hg cp a b
90 $ hg cp a b
91 $ hg ci -mb -d '1 0'
91 $ hg ci -mb -d '1 0'
92 $ cat <<EOF >> b
92 $ cat <<EOF >> b
93 > b4
93 > b4
94 > b5
94 > b5
95 > b6
95 > b6
96 > EOF
96 > EOF
97 $ hg ci -mb2 -d '2 0'
97 $ hg ci -mb2 -d '2 0'
98
98
99 annotate -n b
99 annotate -n b
100
100
101 $ hg annotate -n b
101 $ hg annotate -n b
102 0: a
102 0: a
103 1: a
103 1: a
104 1: a
104 1: a
105 3: b4
105 3: b4
106 3: b5
106 3: b5
107 3: b6
107 3: b6
108
108
109 annotate --no-follow b
109 annotate --no-follow b
110
110
111 $ hg annotate --no-follow b
111 $ hg annotate --no-follow b
112 2: a
112 2: a
113 2: a
113 2: a
114 2: a
114 2: a
115 3: b4
115 3: b4
116 3: b5
116 3: b5
117 3: b6
117 3: b6
118
118
119 annotate -nl b
119 annotate -nl b
120
120
121 $ hg annotate -nl b
121 $ hg annotate -nl b
122 0:1: a
122 0:1: a
123 1:2: a
123 1:2: a
124 1:3: a
124 1:3: a
125 3:4: b4
125 3:4: b4
126 3:5: b5
126 3:5: b5
127 3:6: b6
127 3:6: b6
128
128
129 annotate -nf b
129 annotate -nf b
130
130
131 $ hg annotate -nf b
131 $ hg annotate -nf b
132 0 a: a
132 0 a: a
133 1 a: a
133 1 a: a
134 1 a: a
134 1 a: a
135 3 b: b4
135 3 b: b4
136 3 b: b5
136 3 b: b5
137 3 b: b6
137 3 b: b6
138
138
139 annotate -nlf b
139 annotate -nlf b
140
140
141 $ hg annotate -nlf b
141 $ hg annotate -nlf b
142 0 a:1: a
142 0 a:1: a
143 1 a:2: a
143 1 a:2: a
144 1 a:3: a
144 1 a:3: a
145 3 b:4: b4
145 3 b:4: b4
146 3 b:5: b5
146 3 b:5: b5
147 3 b:6: b6
147 3 b:6: b6
148
148
149 $ hg up -C 2
149 $ hg up -C 2
150 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
150 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
151 $ cat <<EOF >> b
151 $ cat <<EOF >> b
152 > b4
152 > b4
153 > c
153 > c
154 > b5
154 > b5
155 > EOF
155 > EOF
156 $ hg ci -mb2.1 -d '2 0'
156 $ hg ci -mb2.1 -d '2 0'
157 created new head
157 created new head
158 $ hg merge
158 $ hg merge
159 merging b
159 merging b
160 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
160 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
161 (branch merge, don't forget to commit)
161 (branch merge, don't forget to commit)
162 $ hg ci -mmergeb -d '3 0'
162 $ hg ci -mmergeb -d '3 0'
163
163
164 annotate after merge
164 annotate after merge
165 (note: the first one falls back to the vanilla annotate which does not use linelog)
165 (note: the first one falls back to the vanilla annotate which does not use linelog)
166
166
167 $ hg annotate -nf b --debug
167 $ hg annotate -nf b --debug
168 fastannotate: b: rebuilding broken cache
168 fastannotate: b: rebuilding broken cache
169 fastannotate: b: 5 new changesets in the main branch
169 fastannotate: b: 5 new changesets in the main branch
170 0 a: a
170 0 a: a
171 1 a: a
171 1 a: a
172 1 a: a
172 1 a: a
173 3 b: b4
173 3 b: b4
174 4 b: c
174 4 b: c
175 3 b: b5
175 3 b: b5
176
176
177 (difference explained below)
177 (difference explained below)
178
178
179 $ hg annotate -nf b --debug
179 $ hg annotate -nf b --debug
180 fastannotate: b: using fast path (resolved fctx: False)
180 fastannotate: b: using fast path (resolved fctx: False)
181 0 a: a
181 0 a: a
182 1 a: a
182 1 a: a
183 1 a: a
183 1 a: a
184 4 b: b4
184 4 b: b4
185 4 b: c
185 4 b: c
186 4 b: b5
186 4 b: b5
187
187
188 annotate after merge with -l
188 annotate after merge with -l
189 (fastannotate differs from annotate)
189 (fastannotate differs from annotate)
190
190
191 $ hg log -Gp -T '{rev}:{node}' -r '2..5'
191 $ hg log -Gp -T '{rev}:{node}' -r '2..5'
192 @ 5:64afcdf8e29e063c635be123d8d2fb160af00f7e
192 @ 5:64afcdf8e29e063c635be123d8d2fb160af00f7e
193 |\
193 |\
194 | o 4:5fbdc1152d97597717021ad9e063061b200f146bdiff --git a/b b/b
194 | o 4:5fbdc1152d97597717021ad9e063061b200f146bdiff --git a/b b/b
195 | | --- a/b
195 | | --- a/b
196 | | +++ b/b
196 | | +++ b/b
197 | | @@ -1,3 +1,6 @@
197 | | @@ -1,3 +1,6 @@
198 | | a
198 | | a
199 | | a
199 | | a
200 | | a
200 | | a
201 | | +b4
201 | | +b4
202 | | +c
202 | | +c
203 | | +b5
203 | | +b5
204 | |
204 | |
205 o | 3:37ec9f5c3d1f99572d7075971cb4876e2139b52fdiff --git a/b b/b
205 o | 3:37ec9f5c3d1f99572d7075971cb4876e2139b52fdiff --git a/b b/b
206 |/ --- a/b
206 |/ --- a/b
207 | +++ b/b
207 | +++ b/b
208 | @@ -1,3 +1,6 @@
208 | @@ -1,3 +1,6 @@
209 | a
209 | a
210 | a
210 | a
211 | a
211 | a
212 | +b4
212 | +b4
213 | +b5
213 | +b5
214 | +b6
214 | +b6
215 |
215 |
216 o 2:3086dbafde1ce745abfc8d2d367847280aabae9ddiff --git a/a b/b
216 o 2:3086dbafde1ce745abfc8d2d367847280aabae9ddiff --git a/a b/b
217 | copy from a
217 | copy from a
218 ~ copy to b
218 ~ copy to b
219
219
220
220
221 (in this case, "b4", "b5" could be considered introduced by either rev 3, or rev 4.
221 (in this case, "b4", "b5" could be considered introduced by either rev 3, or rev 4.
222 and that causes the rev number difference)
222 and that causes the rev number difference)
223
223
224 $ hg annotate -nlf b --config fastannotate.modes=
224 $ hg annotate -nlf b --config fastannotate.modes=
225 0 a:1: a
225 0 a:1: a
226 1 a:2: a
226 1 a:2: a
227 1 a:3: a
227 1 a:3: a
228 3 b:4: b4
228 3 b:4: b4
229 4 b:5: c
229 4 b:5: c
230 3 b:5: b5
230 3 b:5: b5
231
231
232 $ hg annotate -nlf b
232 $ hg annotate -nlf b
233 0 a:1: a
233 0 a:1: a
234 1 a:2: a
234 1 a:2: a
235 1 a:3: a
235 1 a:3: a
236 4 b:4: b4
236 4 b:4: b4
237 4 b:5: c
237 4 b:5: c
238 4 b:6: b5
238 4 b:6: b5
239
239
240 $ hg up -C 1
240 $ hg up -C 1
241 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
241 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
242 $ hg cp a b
242 $ hg cp a b
243 $ cat <<EOF > b
243 $ cat <<EOF > b
244 > a
244 > a
245 > z
245 > z
246 > a
246 > a
247 > EOF
247 > EOF
248 $ hg ci -mc -d '3 0'
248 $ hg ci -mc -d '3 0'
249 created new head
249 created new head
250 $ hg merge
250 $ hg merge
251 merging b
251 merging b
252 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
252 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
253 (branch merge, don't forget to commit)
253 (branch merge, don't forget to commit)
254 $ cat <<EOF >> b
254 $ cat <<EOF >> b
255 > b4
255 > b4
256 > c
256 > c
257 > b5
257 > b5
258 > EOF
258 > EOF
259 $ echo d >> b
259 $ echo d >> b
260 $ hg ci -mmerge2 -d '4 0'
260 $ hg ci -mmerge2 -d '4 0'
261
261
262 annotate after rename merge
262 annotate after rename merge
263
263
264 $ hg annotate -nf b
264 $ hg annotate -nf b
265 0 a: a
265 0 a: a
266 6 b: z
266 6 b: z
267 1 a: a
267 1 a: a
268 3 b: b4
268 3 b: b4
269 4 b: c
269 4 b: c
270 3 b: b5
270 3 b: b5
271 7 b: d
271 7 b: d
272
272
273 annotate after rename merge with -l
273 annotate after rename merge with -l
274 (fastannotate differs from annotate)
274 (fastannotate differs from annotate)
275
275
276 $ hg log -Gp -T '{rev}:{node}' -r '0+1+6+7'
276 $ hg log -Gp -T '{rev}:{node}' -r '0+1+6+7'
277 @ 7:6284bb6c38fef984a929862a53bbc71ce9eafa81diff --git a/b b/b
277 @ 7:6284bb6c38fef984a929862a53bbc71ce9eafa81diff --git a/b b/b
278 |\ --- a/b
278 |\ --- a/b
279 | : +++ b/b
279 | : +++ b/b
280 | : @@ -1,3 +1,7 @@
280 | : @@ -1,3 +1,7 @@
281 | : a
281 | : a
282 | : z
282 | : z
283 | : a
283 | : a
284 | : +b4
284 | : +b4
285 | : +c
285 | : +c
286 | : +b5
286 | : +b5
287 | : +d
287 | : +d
288 | :
288 | :
289 o : 6:b80e3e32f75a6a67cd4ac85496a11511e9112816diff --git a/a b/b
289 o : 6:b80e3e32f75a6a67cd4ac85496a11511e9112816diff --git a/a b/b
290 :/ copy from a
290 :/ copy from a
291 : copy to b
291 : copy to b
292 : --- a/a
292 : --- a/a
293 : +++ b/b
293 : +++ b/b
294 : @@ -1,3 +1,3 @@
294 : @@ -1,3 +1,3 @@
295 : -a (?)
295 : -a (?)
296 : a
296 : a
297 : +z
297 : +z
298 : a
298 : a
299 : -a (?)
299 : -a (?)
300 :
300 :
301 o 1:762f04898e6684ff713415f7b8a8d53d33f96c92diff --git a/a b/a
301 o 1:762f04898e6684ff713415f7b8a8d53d33f96c92diff --git a/a b/a
302 | --- a/a
302 | --- a/a
303 | +++ b/a
303 | +++ b/a
304 | @@ -1,1 +1,3 @@
304 | @@ -1,1 +1,3 @@
305 | a
305 | a
306 | +a
306 | +a
307 | +a
307 | +a
308 |
308 |
309 o 0:8435f90966e442695d2ded29fdade2bac5ad8065diff --git a/a b/a
309 o 0:8435f90966e442695d2ded29fdade2bac5ad8065diff --git a/a b/a
310 new file mode 100644
310 new file mode 100644
311 --- /dev/null
311 --- /dev/null
312 +++ b/a
312 +++ b/a
313 @@ -0,0 +1,1 @@
313 @@ -0,0 +1,1 @@
314 +a
314 +a
315
315
316
316
317 (note on question marks:
317 (note on question marks:
318 the upstream bdiff change (96f2f50d923f+3633403888ae+8c0c75aa3ff4+5c4e2636c1a9
318 the upstream bdiff change (96f2f50d923f+3633403888ae+8c0c75aa3ff4+5c4e2636c1a9
319 +38ed54888617) alters the output so deletion is not always at the end of the
319 +38ed54888617) alters the output so deletion is not always at the end of the
320 output. for example:
320 output. for example:
321 | a | b | old | new | # old: e1d6aa0e4c3a, new: 8836f13e3c5b
321 | a | b | old | new | # old: e1d6aa0e4c3a, new: 8836f13e3c5b
322 |-------------------|
322 |-------------------|
323 | a | a | a | -a |
323 | a | a | a | -a |
324 | a | z | +z | a |
324 | a | z | +z | a |
325 | a | a | a | +z |
325 | a | a | a | +z |
326 | | | -a | a |
326 | | | -a | a |
327 |-------------------|
327 |-------------------|
328 | a | a | a |
328 | a | a | a |
329 | a | a | a |
329 | a | a | a |
330 | a | | -a |
330 | a | | -a |
331 this leads to more question marks below)
331 this leads to more question marks below)
332
332
333 (rev 1 adds two "a"s and rev 6 deletes one "a".
333 (rev 1 adds two "a"s and rev 6 deletes one "a".
334 the "a" that rev 6 deletes could be either the first or the second "a" of those two "a"s added by rev 1.
334 the "a" that rev 6 deletes could be either the first or the second "a" of those two "a"s added by rev 1.
335 and that causes the line number difference)
335 and that causes the line number difference)
336
336
337 $ hg annotate -nlf b --config fastannotate.modes=
337 $ hg annotate -nlf b --config fastannotate.modes=
338 0 a:1: a
338 0 a:1: a
339 6 b:2: z
339 6 b:2: z
340 1 a:3: a
340 1 a:3: a
341 3 b:4: b4
341 3 b:4: b4
342 4 b:5: c
342 4 b:5: c
343 3 b:5: b5
343 3 b:5: b5
344 7 b:7: d
344 7 b:7: d
345
345
346 $ hg annotate -nlf b
346 $ hg annotate -nlf b
347 0 a:1: a (?)
347 0 a:1: a (?)
348 1 a:2: a (?)
348 1 a:2: a (?)
349 6 b:2: z
349 6 b:2: z
350 1 a:2: a (?)
350 1 a:2: a (?)
351 1 a:3: a (?)
351 1 a:3: a (?)
352 3 b:4: b4
352 3 b:4: b4
353 4 b:5: c
353 4 b:5: c
354 3 b:5: b5
354 3 b:5: b5
355 7 b:7: d
355 7 b:7: d
356
356
357 Issue2807: alignment of line numbers with -l
357 Issue2807: alignment of line numbers with -l
358 (fastannotate differs from annotate, same reason as above)
358 (fastannotate differs from annotate, same reason as above)
359
359
360 $ echo more >> b
360 $ echo more >> b
361 $ hg ci -mmore -d '5 0'
361 $ hg ci -mmore -d '5 0'
362 $ echo more >> b
362 $ echo more >> b
363 $ hg ci -mmore -d '6 0'
363 $ hg ci -mmore -d '6 0'
364 $ echo more >> b
364 $ echo more >> b
365 $ hg ci -mmore -d '7 0'
365 $ hg ci -mmore -d '7 0'
366 $ hg annotate -nlf b
366 $ hg annotate -nlf b
367 0 a: 1: a (?)
367 0 a: 1: a (?)
368 1 a: 2: a (?)
368 1 a: 2: a (?)
369 6 b: 2: z
369 6 b: 2: z
370 1 a: 2: a (?)
370 1 a: 2: a (?)
371 1 a: 3: a (?)
371 1 a: 3: a (?)
372 3 b: 4: b4
372 3 b: 4: b4
373 4 b: 5: c
373 4 b: 5: c
374 3 b: 5: b5
374 3 b: 5: b5
375 7 b: 7: d
375 7 b: 7: d
376 8 b: 8: more
376 8 b: 8: more
377 9 b: 9: more
377 9 b: 9: more
378 10 b:10: more
378 10 b:10: more
379
379
380 linkrev vs rev
380 linkrev vs rev
381
381
382 $ hg annotate -r tip -n a
382 $ hg annotate -r tip -n a
383 0: a
383 0: a
384 1: a
384 1: a
385 1: a
385 1: a
386
386
387 linkrev vs rev with -l
387 linkrev vs rev with -l
388
388
389 $ hg annotate -r tip -nl a
389 $ hg annotate -r tip -nl a
390 0:1: a
390 0:1: a
391 1:2: a
391 1:2: a
392 1:3: a
392 1:3: a
393
393
394 Issue589: "undelete" sequence leads to crash
394 Issue589: "undelete" sequence leads to crash
395
395
396 annotate was crashing when trying to --follow something
396 annotate was crashing when trying to --follow something
397
397
398 like A -> B -> A
398 like A -> B -> A
399
399
400 generate ABA rename configuration
400 generate ABA rename configuration
401
401
402 $ echo foo > foo
402 $ echo foo > foo
403 $ hg add foo
403 $ hg add foo
404 $ hg ci -m addfoo
404 $ hg ci -m addfoo
405 $ hg rename foo bar
405 $ hg rename foo bar
406 $ hg ci -m renamefoo
406 $ hg ci -m renamefoo
407 $ hg rename bar foo
407 $ hg rename bar foo
408 $ hg ci -m renamebar
408 $ hg ci -m renamebar
409
409
410 annotate after ABA with follow
410 annotate after ABA with follow
411
411
412 $ hg annotate --follow foo
412 $ hg annotate --follow foo
413 foo: foo
413 foo: foo
414
414
415 missing file
415 missing file
416
416
417 $ hg ann nosuchfile
417 $ hg ann nosuchfile
418 abort: nosuchfile: no such file in rev e9e6b4fa872f
418 abort: nosuchfile: no such file in rev e9e6b4fa872f
419 [255]
419 [255]
420
420
421 annotate file without '\n' on last line
421 annotate file without '\n' on last line
422
422
423 $ printf "" > c
423 $ printf "" > c
424 $ hg ci -A -m test -u nobody -d '1 0'
424 $ hg ci -A -m test -u nobody -d '1 0'
425 adding c
425 adding c
426 $ hg annotate c
426 $ hg annotate c
427 $ printf "a\nb" > c
427 $ printf "a\nb" > c
428 $ hg ci -m test
428 $ hg ci -m test
429 $ hg annotate c
429 $ hg annotate c
430 [0-9]+: a (re)
430 [0-9]+: a (re)
431 [0-9]+: b (re)
431 [0-9]+: b (re)
432
432
433 Issue3841: check annotation of the file of which filelog includes
433 Issue3841: check annotation of the file of which filelog includes
434 merging between the revision and its ancestor
434 merging between the revision and its ancestor
435
435
436 to reproduce the situation with recent Mercurial, this script uses (1)
436 to reproduce the situation with recent Mercurial, this script uses (1)
437 "hg debugsetparents" to merge without ancestor check by "hg merge",
437 "hg debugsetparents" to merge without ancestor check by "hg merge",
438 and (2) the extension to allow filelog merging between the revision
438 and (2) the extension to allow filelog merging between the revision
439 and its ancestor by overriding "repo._filecommit".
439 and its ancestor by overriding "repo._filecommit".
440
440
441 $ cat > ../legacyrepo.py <<EOF
441 $ cat > ../legacyrepo.py <<EOF
442 > from mercurial import error, node
442 > from mercurial import error, node
443 > def reposetup(ui, repo):
443 > def reposetup(ui, repo):
444 > class legacyrepo(repo.__class__):
444 > class legacyrepo(repo.__class__):
445 > def _filecommit(self, fctx, manifest1, manifest2,
445 > def _filecommit(self, fctx, manifest1, manifest2,
446 > linkrev, tr, changelist):
446 > linkrev, tr, changelist, includecopymeta):
447 > fname = fctx.path()
447 > fname = fctx.path()
448 > text = fctx.data()
448 > text = fctx.data()
449 > flog = self.file(fname)
449 > flog = self.file(fname)
450 > fparent1 = manifest1.get(fname, node.nullid)
450 > fparent1 = manifest1.get(fname, node.nullid)
451 > fparent2 = manifest2.get(fname, node.nullid)
451 > fparent2 = manifest2.get(fname, node.nullid)
452 > meta = {}
452 > meta = {}
453 > copy = fctx.renamed()
453 > copy = fctx.renamed()
454 > if copy and copy[0] != fname:
454 > if copy and copy[0] != fname:
455 > raise error.Abort('copying is not supported')
455 > raise error.Abort('copying is not supported')
456 > if fparent2 != node.nullid:
456 > if fparent2 != node.nullid:
457 > changelist.append(fname)
457 > changelist.append(fname)
458 > return flog.add(text, meta, tr, linkrev,
458 > return flog.add(text, meta, tr, linkrev,
459 > fparent1, fparent2)
459 > fparent1, fparent2)
460 > raise error.Abort('only merging is supported')
460 > raise error.Abort('only merging is supported')
461 > repo.__class__ = legacyrepo
461 > repo.__class__ = legacyrepo
462 > EOF
462 > EOF
463
463
464 $ cat > baz <<EOF
464 $ cat > baz <<EOF
465 > 1
465 > 1
466 > 2
466 > 2
467 > 3
467 > 3
468 > 4
468 > 4
469 > 5
469 > 5
470 > EOF
470 > EOF
471 $ hg add baz
471 $ hg add baz
472 $ hg commit -m "baz:0"
472 $ hg commit -m "baz:0"
473
473
474 $ cat > baz <<EOF
474 $ cat > baz <<EOF
475 > 1 baz:1
475 > 1 baz:1
476 > 2
476 > 2
477 > 3
477 > 3
478 > 4
478 > 4
479 > 5
479 > 5
480 > EOF
480 > EOF
481 $ hg commit -m "baz:1"
481 $ hg commit -m "baz:1"
482
482
483 $ cat > baz <<EOF
483 $ cat > baz <<EOF
484 > 1 baz:1
484 > 1 baz:1
485 > 2 baz:2
485 > 2 baz:2
486 > 3
486 > 3
487 > 4
487 > 4
488 > 5
488 > 5
489 > EOF
489 > EOF
490 $ hg debugsetparents 17 17
490 $ hg debugsetparents 17 17
491 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:2"
491 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:2"
492 $ hg debugindexdot baz
492 $ hg debugindexdot baz
493 digraph G {
493 digraph G {
494 -1 -> 0
494 -1 -> 0
495 0 -> 1
495 0 -> 1
496 1 -> 2
496 1 -> 2
497 1 -> 2
497 1 -> 2
498 }
498 }
499 $ hg annotate baz
499 $ hg annotate baz
500 17: 1 baz:1
500 17: 1 baz:1
501 18: 2 baz:2
501 18: 2 baz:2
502 16: 3
502 16: 3
503 16: 4
503 16: 4
504 16: 5
504 16: 5
505
505
506 $ cat > baz <<EOF
506 $ cat > baz <<EOF
507 > 1 baz:1
507 > 1 baz:1
508 > 2 baz:2
508 > 2 baz:2
509 > 3 baz:3
509 > 3 baz:3
510 > 4
510 > 4
511 > 5
511 > 5
512 > EOF
512 > EOF
513 $ hg commit -m "baz:3"
513 $ hg commit -m "baz:3"
514
514
515 $ cat > baz <<EOF
515 $ cat > baz <<EOF
516 > 1 baz:1
516 > 1 baz:1
517 > 2 baz:2
517 > 2 baz:2
518 > 3 baz:3
518 > 3 baz:3
519 > 4 baz:4
519 > 4 baz:4
520 > 5
520 > 5
521 > EOF
521 > EOF
522 $ hg debugsetparents 19 18
522 $ hg debugsetparents 19 18
523 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:4"
523 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:4"
524 $ hg debugindexdot baz
524 $ hg debugindexdot baz
525 digraph G {
525 digraph G {
526 -1 -> 0
526 -1 -> 0
527 0 -> 1
527 0 -> 1
528 1 -> 2
528 1 -> 2
529 1 -> 2
529 1 -> 2
530 2 -> 3
530 2 -> 3
531 3 -> 4
531 3 -> 4
532 2 -> 4
532 2 -> 4
533 }
533 }
534 $ hg annotate baz
534 $ hg annotate baz
535 17: 1 baz:1
535 17: 1 baz:1
536 18: 2 baz:2
536 18: 2 baz:2
537 19: 3 baz:3
537 19: 3 baz:3
538 20: 4 baz:4
538 20: 4 baz:4
539 16: 5
539 16: 5
540
540
541 annotate clean file
541 annotate clean file
542
542
543 $ hg annotate -ncr "wdir()" foo
543 $ hg annotate -ncr "wdir()" foo
544 11 472b18db256d : foo
544 11 472b18db256d : foo
545
545
546 annotate modified file
546 annotate modified file
547
547
548 $ echo foofoo >> foo
548 $ echo foofoo >> foo
549 $ hg annotate -r "wdir()" foo
549 $ hg annotate -r "wdir()" foo
550 11 : foo
550 11 : foo
551 20+: foofoo
551 20+: foofoo
552
552
553 $ hg annotate -cr "wdir()" foo
553 $ hg annotate -cr "wdir()" foo
554 472b18db256d : foo
554 472b18db256d : foo
555 b6bedd5477e7+: foofoo
555 b6bedd5477e7+: foofoo
556
556
557 $ hg annotate -ncr "wdir()" foo
557 $ hg annotate -ncr "wdir()" foo
558 11 472b18db256d : foo
558 11 472b18db256d : foo
559 20 b6bedd5477e7+: foofoo
559 20 b6bedd5477e7+: foofoo
560
560
561 $ hg annotate --debug -ncr "wdir()" foo
561 $ hg annotate --debug -ncr "wdir()" foo
562 11 472b18db256d1e8282064eab4bfdaf48cbfe83cd : foo
562 11 472b18db256d1e8282064eab4bfdaf48cbfe83cd : foo
563 20 b6bedd5477e797f25e568a6402d4697f3f895a72+: foofoo
563 20 b6bedd5477e797f25e568a6402d4697f3f895a72+: foofoo
564
564
565 $ hg annotate -udr "wdir()" foo
565 $ hg annotate -udr "wdir()" foo
566 test Thu Jan 01 00:00:00 1970 +0000: foo
566 test Thu Jan 01 00:00:00 1970 +0000: foo
567 test [A-Za-z0-9:+ ]+: foofoo (re)
567 test [A-Za-z0-9:+ ]+: foofoo (re)
568
568
569 $ hg annotate -ncr "wdir()" -Tjson foo
569 $ hg annotate -ncr "wdir()" -Tjson foo
570 [
570 [
571 {
571 {
572 "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": "ffffffffffffffffffffffffffffffffffffffff", "rev": 2147483647}],
572 "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": "ffffffffffffffffffffffffffffffffffffffff", "rev": 2147483647}],
573 "path": "foo"
573 "path": "foo"
574 }
574 }
575 ]
575 ]
576
576
577 annotate added file
577 annotate added file
578
578
579 $ echo bar > bar
579 $ echo bar > bar
580 $ hg add bar
580 $ hg add bar
581 $ hg annotate -ncr "wdir()" bar
581 $ hg annotate -ncr "wdir()" bar
582 20 b6bedd5477e7+: bar
582 20 b6bedd5477e7+: bar
583
583
584 annotate renamed file
584 annotate renamed file
585
585
586 $ hg rename foo renamefoo2
586 $ hg rename foo renamefoo2
587 $ hg annotate -ncr "wdir()" renamefoo2
587 $ hg annotate -ncr "wdir()" renamefoo2
588 11 472b18db256d : foo
588 11 472b18db256d : foo
589 20 b6bedd5477e7+: foofoo
589 20 b6bedd5477e7+: foofoo
590
590
591 annotate missing file
591 annotate missing file
592
592
593 $ rm baz
593 $ rm baz
594 $ hg annotate -ncr "wdir()" baz
594 $ hg annotate -ncr "wdir()" baz
595 abort: $TESTTMP/repo/baz: $ENOENT$ (windows !)
595 abort: $TESTTMP/repo/baz: $ENOENT$ (windows !)
596 abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
596 abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
597 [255]
597 [255]
598
598
599 annotate removed file
599 annotate removed file
600
600
601 $ hg rm baz
601 $ hg rm baz
602 $ hg annotate -ncr "wdir()" baz
602 $ hg annotate -ncr "wdir()" baz
603 abort: $TESTTMP/repo/baz: $ENOENT$ (windows !)
603 abort: $TESTTMP/repo/baz: $ENOENT$ (windows !)
604 abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
604 abort: $ENOENT$: '$TESTTMP/repo/baz' (no-windows !)
605 [255]
605 [255]
606
606
607 Test annotate with whitespace options
607 Test annotate with whitespace options
608
608
609 $ cd ..
609 $ cd ..
610 $ hg init repo-ws
610 $ hg init repo-ws
611 $ cd repo-ws
611 $ cd repo-ws
612 $ cat > a <<EOF
612 $ cat > a <<EOF
613 > aa
613 > aa
614 >
614 >
615 > b b
615 > b b
616 > EOF
616 > EOF
617 $ hg ci -Am "adda"
617 $ hg ci -Am "adda"
618 adding a
618 adding a
619 $ sed 's/EOL$//g' > a <<EOF
619 $ sed 's/EOL$//g' > a <<EOF
620 > a a
620 > a a
621 >
621 >
622 > EOL
622 > EOL
623 > b b
623 > b b
624 > EOF
624 > EOF
625 $ hg ci -m "changea"
625 $ hg ci -m "changea"
626
626
627 Annotate with no option
627 Annotate with no option
628
628
629 $ hg annotate a
629 $ hg annotate a
630 1: a a
630 1: a a
631 0:
631 0:
632 1:
632 1:
633 1: b b
633 1: b b
634
634
635 Annotate with --ignore-space-change
635 Annotate with --ignore-space-change
636
636
637 $ hg annotate --ignore-space-change a
637 $ hg annotate --ignore-space-change a
638 1: a a
638 1: a a
639 1:
639 1:
640 0:
640 0:
641 0: b b
641 0: b b
642
642
643 Annotate with --ignore-all-space
643 Annotate with --ignore-all-space
644
644
645 $ hg annotate --ignore-all-space a
645 $ hg annotate --ignore-all-space a
646 0: a a
646 0: a a
647 0:
647 0:
648 1:
648 1:
649 0: b b
649 0: b b
650
650
651 Annotate with --ignore-blank-lines (similar to no options case)
651 Annotate with --ignore-blank-lines (similar to no options case)
652
652
653 $ hg annotate --ignore-blank-lines a
653 $ hg annotate --ignore-blank-lines a
654 1: a a
654 1: a a
655 0:
655 0:
656 1:
656 1:
657 1: b b
657 1: b b
658
658
659 $ cd ..
659 $ cd ..
660
660
661 Annotate with linkrev pointing to another branch
661 Annotate with linkrev pointing to another branch
662 ------------------------------------------------
662 ------------------------------------------------
663
663
664 create history with a filerev whose linkrev points to another branch
664 create history with a filerev whose linkrev points to another branch
665
665
666 $ hg init branchedlinkrev
666 $ hg init branchedlinkrev
667 $ cd branchedlinkrev
667 $ cd branchedlinkrev
668 $ echo A > a
668 $ echo A > a
669 $ hg commit -Am 'contentA'
669 $ hg commit -Am 'contentA'
670 adding a
670 adding a
671 $ echo B >> a
671 $ echo B >> a
672 $ hg commit -m 'contentB'
672 $ hg commit -m 'contentB'
673 $ hg up --rev 'desc(contentA)'
673 $ hg up --rev 'desc(contentA)'
674 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
674 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
675 $ echo unrelated > unrelated
675 $ echo unrelated > unrelated
676 $ hg commit -Am 'unrelated'
676 $ hg commit -Am 'unrelated'
677 adding unrelated
677 adding unrelated
678 created new head
678 created new head
679 $ hg graft -r 'desc(contentB)'
679 $ hg graft -r 'desc(contentB)'
680 grafting 1:fd27c222e3e6 "contentB"
680 grafting 1:fd27c222e3e6 "contentB"
681 $ echo C >> a
681 $ echo C >> a
682 $ hg commit -m 'contentC'
682 $ hg commit -m 'contentC'
683 $ echo W >> a
683 $ echo W >> a
684 $ hg log -G
684 $ hg log -G
685 @ changeset: 4:072f1e8df249
685 @ changeset: 4:072f1e8df249
686 | tag: tip
686 | tag: tip
687 | user: test
687 | user: test
688 | date: Thu Jan 01 00:00:00 1970 +0000
688 | date: Thu Jan 01 00:00:00 1970 +0000
689 | summary: contentC
689 | summary: contentC
690 |
690 |
691 o changeset: 3:ff38df03cc4b
691 o changeset: 3:ff38df03cc4b
692 | user: test
692 | user: test
693 | date: Thu Jan 01 00:00:00 1970 +0000
693 | date: Thu Jan 01 00:00:00 1970 +0000
694 | summary: contentB
694 | summary: contentB
695 |
695 |
696 o changeset: 2:62aaf3f6fc06
696 o changeset: 2:62aaf3f6fc06
697 | parent: 0:f0932f74827e
697 | parent: 0:f0932f74827e
698 | user: test
698 | user: test
699 | date: Thu Jan 01 00:00:00 1970 +0000
699 | date: Thu Jan 01 00:00:00 1970 +0000
700 | summary: unrelated
700 | summary: unrelated
701 |
701 |
702 | o changeset: 1:fd27c222e3e6
702 | o changeset: 1:fd27c222e3e6
703 |/ user: test
703 |/ user: test
704 | date: Thu Jan 01 00:00:00 1970 +0000
704 | date: Thu Jan 01 00:00:00 1970 +0000
705 | summary: contentB
705 | summary: contentB
706 |
706 |
707 o changeset: 0:f0932f74827e
707 o changeset: 0:f0932f74827e
708 user: test
708 user: test
709 date: Thu Jan 01 00:00:00 1970 +0000
709 date: Thu Jan 01 00:00:00 1970 +0000
710 summary: contentA
710 summary: contentA
711
711
712
712
713 Annotate should list ancestor of starting revision only
713 Annotate should list ancestor of starting revision only
714
714
715 $ hg annotate a
715 $ hg annotate a
716 0: A
716 0: A
717 3: B
717 3: B
718 4: C
718 4: C
719
719
720 $ hg annotate a -r 'wdir()'
720 $ hg annotate a -r 'wdir()'
721 0 : A
721 0 : A
722 3 : B
722 3 : B
723 4 : C
723 4 : C
724 4+: W
724 4+: W
725
725
726 Even when the starting revision is the linkrev-shadowed one:
726 Even when the starting revision is the linkrev-shadowed one:
727
727
728 $ hg annotate a -r 3
728 $ hg annotate a -r 3
729 0: A
729 0: A
730 3: B
730 3: B
731
731
732 $ cd ..
732 $ cd ..
733
733
734 Issue5360: Deleted chunk in p1 of a merge changeset
734 Issue5360: Deleted chunk in p1 of a merge changeset
735
735
736 $ hg init repo-5360
736 $ hg init repo-5360
737 $ cd repo-5360
737 $ cd repo-5360
738 $ echo 1 > a
738 $ echo 1 > a
739 $ hg commit -A a -m 1
739 $ hg commit -A a -m 1
740 $ echo 2 >> a
740 $ echo 2 >> a
741 $ hg commit -m 2
741 $ hg commit -m 2
742 $ echo a > a
742 $ echo a > a
743 $ hg commit -m a
743 $ hg commit -m a
744 $ hg update '.^' -q
744 $ hg update '.^' -q
745 $ echo 3 >> a
745 $ echo 3 >> a
746 $ hg commit -m 3 -q
746 $ hg commit -m 3 -q
747 $ hg merge 2 -q
747 $ hg merge 2 -q
748 $ cat > a << EOF
748 $ cat > a << EOF
749 > b
749 > b
750 > 1
750 > 1
751 > 2
751 > 2
752 > 3
752 > 3
753 > a
753 > a
754 > EOF
754 > EOF
755 $ hg resolve --mark -q
755 $ hg resolve --mark -q
756 $ hg commit -m m
756 $ hg commit -m m
757 $ hg annotate a
757 $ hg annotate a
758 4: b
758 4: b
759 0: 1
759 0: 1
760 1: 2
760 1: 2
761 3: 3
761 3: 3
762 2: a
762 2: a
763
763
764 $ cd ..
764 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now